lamindb 0.72.1__py3-none-any.whl → 0.73.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as published to their public registry; it is provided for informational purposes only.
@@ -1,10 +1,11 @@
  from __future__ import annotations

+ from collections import defaultdict
  from typing import TYPE_CHECKING, Dict

  import numpy as np
  from lamin_utils import colors, logger
- from lnschema_core.models import Artifact, Collection, Data, Feature, LinkORM, Registry
+ from lnschema_core.models import Feature

  from lamindb._from_values import _print_values
  from lamindb._registry import (
@@ -19,77 +20,62 @@ from ._settings import settings
  from .schema import dict_related_model_to_related_name

  if TYPE_CHECKING:
+     from lnschema_core.models import Artifact, Collection, HasFeatures, Registry
+
      from lamindb._query_set import QuerySet


- def get_labels_as_dict(self: Data):
+ def get_labels_as_dict(self: HasFeatures, links: bool = False):
+     exclude_set = {
+         "feature_sets",
+         "unordered_artifacts",
+         "input_of",
+         "collections",
+         "source_code_of",
+         "report_of",
+         "environment_of",
+         "collection_links",
+         "artifact_links",
+         "feature_set_links",
+         "previous_runs",
+         "feature_values",
+     }
      labels = {}  # type: ignore
      if self.id is None:
          return labels
      for related_model_name, related_name in dict_related_model_to_related_name(
-         self.__class__
+         self.__class__, links=links
      ).items():
-         if related_name in {
-             "feature_sets",
-             "unordered_artifacts",
-             "input_of",
-             "collections",
-             "source_of",
-             "report_of",
-             "environment_of",
-         }:
-             continue
-         labels[related_name] = (related_model_name, self.__getattribute__(related_name))
+         if related_name not in exclude_set:
+             labels[related_name] = (
+                 related_model_name,
+                 getattr(self, related_name).all(),
+             )
      return labels


- def print_labels(
-     self: Data, field: str = "name", ignore_labels_with_feature: bool = True
- ):
+ def print_labels(self: HasFeatures, field: str = "name", print_types: bool = False):
      labels_msg = ""
      for related_name, (related_model, labels) in get_labels_as_dict(self).items():
+         # there is a try except block here to deal with schema inconsistencies
+         # during transfer between instances
          try:
              labels_list = list(labels.values_list(field, flat=True))
              if len(labels_list) > 0:
                  get_default_str_field(labels)
-                 print_values = _print_values(labels_list[:20], n=10)
-                 labels_msg += f" 📎 {related_name} ({len(labels_list)}, {colors.italic(related_model)}): {print_values}\n"
+                 print_values = _print_values(labels_list, n=10)
+                 type_str = f": {related_model}" if print_types else ""
+                 labels_msg += f" .{related_name}{type_str} = {print_values}\n"
          except Exception:
              continue
-     if len(labels_msg) > 0:
-         return f"{colors.green('Labels')}:\n{labels_msg}"
-     else:
-         return ""
-
-
- def transfer_add_labels(labels, features_lookup_self, self, row, parents: bool = True):
-     def transfer_single_registry(validated_labels, new_labels):
-         # here the new labels are transferred to the self db
-         if len(new_labels) > 0:
-             transfer_fk_to_default_db_bulk(new_labels, using_key=None)
-             for label in new_labels:
-                 transfer_to_default_db(
-                     label, using_key=None, mute=True, transfer_fk=False
-                 )
-             # not saving parents for Organism during transfer
-             registry = new_labels[0].__class__
-             logger.info(f"saving {len(new_labels)} new {registry.__name__} records")
-             save(new_labels)
-         # link labels records from self db
-         self._host.labels.add(
-             validated_labels + new_labels,
-             feature=features_lookup_self.get(row["name"]),
-         )
-
-     # validate labels on the default db
-     result = validate_labels(labels, parents=parents)
-     if isinstance(result, Dict):
-         for _, (validated_labels, new_labels) in result.items():
-             transfer_single_registry(validated_labels, new_labels)
-     else:
-         transfer_single_registry(*result)
+     msg = ""
+     if labels_msg:
+         msg += f" {colors.italic('Labels')}\n"
+         msg += labels_msg
+     return msg


+ # Alex: is this a label transfer function?
  def validate_labels(labels: QuerySet | list | dict, parents: bool = True):
      def validate_labels_registry(
          labels: QuerySet | list | dict, parents: bool = True
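
print_labels now renders each related name as an attribute-style line and only appends the model type when print_types=True. A minimal sketch of the new formatting on plain dictionaries (get_labels_as_dict and _print_values are stubbed out; the related names and values below are illustrative, not taken from a real instance):

    labels_by_related_name = {
        "ulabels": ("ULabel", ["setosa", "versicolor", "virginica"]),
        "organisms": ("bionty.Organism", ["iris"]),
    }

    def render_labels(labels_dict: dict, print_types: bool = False) -> str:
        msg = ""
        for related_name, (related_model, values) in labels_dict.items():
            print_values = ", ".join(values)  # stand-in for _print_values(values, n=10)
            type_str = f": {related_model}" if print_types else ""
            msg += f" .{related_name}{type_str} = {print_values}\n"
        return msg

    print(render_labels(labels_by_related_name, print_types=True))
    # .ulabels: ULabel = setosa, versicolor, virginica
    # .organisms: bionty.Organism = iris
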
@@ -168,7 +154,6 @@ class LabelManager:
          Args:
              records: Label records to add.
              feature: Feature under which to group the labels.
-             field: Field to parse iterable with from_values.
          """
          from ._data import add_labels

@@ -191,8 +176,8 @@ class LabelManager:

          return get_labels(self._host, feature=feature, mute=mute, flat_names=flat_names)

-     def add_from(self, data: Data, parents: bool = True):
-         """Transfer labels from a file or collection.
+     def add_from(self, data: HasFeatures, parents: bool = True) -> None:
+         """Add labels from an artifact or collection to another artifact or collection.

          Examples:
              >>> file1 = ln.Artifact(pd.DataFrame(index=[0, 1]))
@@ -207,45 +192,61 @@ class LabelManager:
          """
          from django.db.utils import ProgrammingError

-         features_lookup_self = {f.name: f for f in Feature.objects.filter().all()}
-         features_lookup_data = {
-             f.name: f for f in Feature.objects.using(data._state.db).filter().all()
-         }
-         for _, feature_set in data.features.feature_set_by_slot.items():
-             # add labels stratified by feature
-             if feature_set.registry == "Feature":
-                 # df_slot is the Feature table with type
-                 df_slot = feature_set.features.df()
-                 for _, row in df_slot.iterrows():
-                     if row["dtype"].startswith("cat["):
-                         logger.info(f"transferring {row['name']}")
-                         # labels records from data db
-                         labels = data.labels.get(
-                             features_lookup_data.get(row["name"]), mute=True
-                         )
-                         transfer_add_labels(
-                             labels, features_lookup_self, self, row, parents=parents
-                         )
-         # TODO: for now, has to be duplicated
          using_key = settings._using_key
          for related_name, (_, labels) in get_labels_as_dict(data).items():
              labels = labels.all()
              try:
-                 if len(labels) == 0:
+                 if not labels.exists():
                      continue
-                 validated_labels, new_labels = validate_labels(labels, parents=parents)
+                 # look for features
+                 data_name_lower = data.__class__.__name__.lower()
+                 labels_by_features = defaultdict(list)
+                 features = set()
+                 _, new_labels = validate_labels(labels, parents=parents)
                  if len(new_labels) > 0:
                      transfer_fk_to_default_db_bulk(new_labels, using_key)
-                     for label in new_labels:
+                 for label in labels:
+                     # if the link table doesn't follow this convention, we'll ignore it
+                     if not hasattr(label, f"{data_name_lower}_links"):
+                         key = None
+                     else:
+                         link = getattr(label, f"{data_name_lower}_links").get(
+                             **{f"{data_name_lower}_id": data.id}
+                         )
+                         if link.feature is not None:
+                             features.add(link.feature)
+                             key = link.feature.name
+                         else:
+                             key = None
+                     label_returned = transfer_to_default_db(
+                         label,
+                         using_key,
+                         mute=True,
+                         transfer_fk=False,
+                         save=True,
+                     )
+                     # TODO: refactor return value of transfer to default db
+                     if label_returned is not None:
+                         label = label_returned
+                     labels_by_features[key].append(label)
+                 # treat features
+                 _, new_features = validate_labels(list(features))
+                 if len(new_features) > 0:
+                     transfer_fk_to_default_db_bulk(new_features, using_key)
+                     for feature in new_features:
                          transfer_to_default_db(
-                             label, using_key, mute=True, transfer_fk=False
+                             feature, using_key, mute=True, transfer_fk=False
                          )
-                     save(new_labels, parents=parents)
-                 # this should not occur as file and collection should have the same attributes
-                 # but this might not be true for custom schema
-                 labels_list = validated_labels + new_labels
+                     save(new_features, parents=parents)
                  if hasattr(self._host, related_name):
-                     getattr(self._host, related_name).add(*labels_list)
+                     for feature_name, labels in labels_by_features.items():
+                         if feature_name is not None:
+                             feature_id = Feature.filter(name=feature_name).one().id
+                         else:
+                             feature_id = None
+                         getattr(self._host, related_name).add(
+                             *labels, through_defaults={"feature_id": feature_id}
+                         )
              # ProgrammingError is raised when schemas don't match between source and target instances
              except ProgrammingError:
                  continue
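
The new add_from no longer walks feature sets; it inspects each label's <artifact|collection>_links row to recover the feature the label was attached under, groups labels per feature with a defaultdict, and re-links them via through_defaults={"feature_id": ...}. A minimal sketch of the grouping step with plain objects standing in for registry records and link rows (no lamindb or Django involved):

    from __future__ import annotations

    from collections import defaultdict
    from dataclasses import dataclass
    from typing import Optional


    @dataclass
    class Link:
        # stand-in for a row of the artifact/collection link table
        feature_name: Optional[str]


    @dataclass
    class Label:
        name: str
        link: Optional[Link]  # stand-in for label.<artifact|collection>_links.get(...)


    def group_labels_by_feature(labels: list[Label]) -> dict[Optional[str], list[Label]]:
        labels_by_features = defaultdict(list)
        for label in labels:
            # mirror the diff: fall back to key=None when no feature is recorded on the link
            key = label.link.feature_name if label.link is not None else None
            labels_by_features[key].append(label)
        return labels_by_features


    labels = [
        Label("setosa", Link("species")),
        Label("versicolor", Link("species")),
        Label("curated", None),
    ]
    print({k: [label.name for label in v] for k, v in group_labels_by_feature(labels).items()})
    # {'species': ['setosa', 'versicolor'], None: ['curated']}
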
@@ -14,11 +14,17 @@ from lnschema_core.models import Param, ParamValue, RunParamValue
  from lnschema_core.types import TransformType
  from lnschema_core.users import current_user_id

- from lamindb._save import save
  from lamindb.core._transform_settings import transform as transform_settings

  from ._settings import settings
  from ._sync_git import get_transform_reference_from_git_repo
+ from .exceptions import (
+     MissingTransformSettings,
+     NotebookNotSavedError,
+     NoTitleError,
+     UpdateTransformSettings,
+ )
+ from .versioning import bump_version as bump_version_function

  if TYPE_CHECKING:
      from lamindb_setup.core.types import UPathStr
@@ -32,22 +38,6 @@ msg_path_failed = (
  )


- class NotebookNotSavedError(Exception):
-     pass
-
-
- class NoTitleError(Exception):
-     pass
-
-
- class MissingTransformSettings(SystemExit):
-     pass
-
-
- class UpdateTransformSettings(SystemExit):
-     pass
-
-
  def get_uid_ext(version: str) -> str:
      from lamin_utils._base62 import encodebytes

@@ -86,13 +76,7 @@ def update_stem_uid_or_version(
      if bump_version:
          new_stem_uid = stem_uid
          if response == "bump":
-             try:
-                 new_version = str(int(version) + 1)
-             except ValueError:
-                 new_version = input(
-                     f"The current version is '{version}' - please type the new"
-                     " version: "
-                 )
+             new_version = bump_version_function(version, behavior="prompt")
          else:
              new_version = response
          updated = new_version != version
@@ -145,6 +129,44 @@ ln.settings.transform.stem_uid = "{stem_uid}"
  ln.settings.transform.version = "{version}"
  """

+ MESSAGE_UPDATE = """You updated your {transform_type}.
+
+ If this is a minor update, bump your version from {old_version} to:
+
+ ln.settings.transform.version = "{new_version_minor_bump}"
+
+ If this is a major update, bump it to:
+
+ ln.settings.transform.version = "{new_version_major_bump}"
+
+ If this is a new {transform_type}, set:
+
+ ln.settings.transform.stem_uid = "{new_stem_uid}"
+ ln.settings.transform.version = "1"
+
+ """
+
+
+ def raise_transform_settings_error_needs_update(old_version: str) -> None:
+     from lnschema_core.ids import base62_12
+
+     transform_type = "notebook" if is_run_from_ipython else "script"
+     new_stem_uid = base62_12()
+
+     raise UpdateTransformSettings(
+         MESSAGE_UPDATE.format(
+             transform_type=transform_type,
+             new_stem_uid=new_stem_uid,
+             old_version=old_version,
+             new_version_major_bump=bump_version_function(
+                 old_version, bump_type="major", behavior="ignore"
+             ),
+             new_version_minor_bump=bump_version_function(
+                 old_version, bump_type="minor", behavior="ignore"
+             ),
+         )
+     )
+

  def raise_transform_settings_error() -> None:
      from lnschema_core.ids import base62_12
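
raise_transform_settings_error_needs_update fills MESSAGE_UPDATE with a minor and a major bump computed via behavior="ignore", so a malformed old version surfaces as "?" instead of prompting. A sketch of the rendered message for old_version="1" (the stem uid below is a fixed placeholder; the real code draws a random base62_12()):

    MESSAGE_UPDATE = """You updated your {transform_type}.

    If this is a minor update, bump your version from {old_version} to:

    ln.settings.transform.version = "{new_version_minor_bump}"

    If this is a major update, bump it to:

    ln.settings.transform.version = "{new_version_major_bump}"

    If this is a new {transform_type}, set:

    ln.settings.transform.stem_uid = "{new_stem_uid}"
    ln.settings.transform.version = "1"
    """

    print(
        MESSAGE_UPDATE.format(
            transform_type="notebook",
            old_version="1",
            new_version_minor_bump="1.1",  # bump_version("1", "minor", behavior="ignore")
            new_version_major_bump="2",    # bump_version("1", "major", behavior="ignore")
            new_stem_uid="XXXXXXXXXXXX",   # placeholder for a random 12-char base62 uid
        )
    )
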
@@ -227,7 +249,7 @@ class run_context:
          transform: Transform | None = None,
          new_run: bool | None = None,
          path: str | None = None,
-     ) -> None:
+     ) -> Run:
          """Track notebook or script run.

          Creates or loads a global :class:`~lamindb.Run` that enables data
@@ -354,7 +376,7 @@ class run_context:
          from ._track_environment import track_environment

          track_environment(run)
-         return None
+         return run

      @classmethod
      def _track_script(
@@ -474,14 +496,14 @@ class run_context:
                      transform.key = key
                      transform.save()
                      logger.important(f"updated: {transform}")
-             # check whether the notebook source code was already saved
+             # check whether transform source code was already saved
              if transform.source_code_id is not None:
                  response = None
                  if is_run_from_ipython:
                      if os.getenv("LAMIN_TESTING") is None:
                          response = input(
                              "You already saved source code for this notebook."
-                             " Bump the version before a new run? (y/n)"
+                             " Auto-bump the version before a new run? (y/n)"
                          )
                      else:
                          response = "y"
@@ -492,7 +514,7 @@ class run_context:
                      if os.getenv("LAMIN_TESTING") is None:
                          response = input(
                              "You already saved source code for this script and meanwhile modified it without bumping a version."
-                             " Bump the version before a new run? (y/n)"
+                             " Auto-bump the version before a new run? (y/n)"
                          )
                      else:
                          response = "y"
@@ -503,8 +525,10 @@ class run_context:
                  if response == "y":
                      update_stem_uid_or_version(stem_uid, version, bump_version=True)
                  else:
-                     # we want a new stem_uid in this case, hence raise the error
-                     raise_transform_settings_error()
+                     # the user didn't agree to auto-bump, hence treat manually
+                     raise_transform_settings_error_needs_update(
+                         old_version=transform.version
+                     )
              else:
                  logger.important(f"loaded: {transform}")
          cls.transform = transform
lamindb/core/_sync_git.py CHANGED
@@ -20,7 +20,9 @@ def get_git_repo_from_remote() -> Path:
      if repo_dir.exists():
          logger.warning(f"git repo {repo_dir} already exists locally")
          return repo_dir
-     logger.important(f"cloning {repo_url} into {repo_dir}")
+     logger.important(
+         f"running outside of synched git repo, cloning {repo_url} into {repo_dir}"
+     )
      result = subprocess.run(
          f"git clone --depth 10 {repo_url}.git",
          shell=True,
@@ -48,8 +50,7 @@ def check_local_git_repo() -> bool:
          # running-in-correct-git-repo
          return True
      else:
-         # running-in-wrong-git-repo
-         logger.warning("running in wrong git repo")
+         # running-outside-of-correct-git-repo
          return False


@@ -135,7 +135,7 @@ def dir_iris_images() -> UPath: # pragma: no cover

      Based on: https://github.com/laminlabs/lamindb-dev-datasets/pull/2
      """
-     return UPath("s3://lamindb-dev-datasets/iris_studies")
+     return UPath("s3://lamindata/iris_studies")


  def anndata_mouse_sc_lymph_node(
@@ -6,11 +6,44 @@ The registry base class:
     :toctree: .

     ValidationError
+    NotebookNotSavedError
+    NoTitleError
+    MissingTransformSettings
+    UpdateTransformSettings

  """


- class ValidationError(Exception):
+ class ValidationError(SystemExit):
      """Validation error: not mapped in registry."""

      pass
+
+
+ # -------------------------------------------------------------------------------------
+ # ln.track() AKA run_context
+ # -------------------------------------------------------------------------------------
+
+
+ class NotebookNotSavedError(Exception):
+     """Notebook wasn't saved."""
+
+     pass
+
+
+ class NoTitleError(Exception):
+     """Notebook has no title."""
+
+     pass
+
+
+ class MissingTransformSettings(SystemExit):
+     """User didn't define transform settings."""
+
+     pass
+
+
+ class UpdateTransformSettings(SystemExit):
+     """Transform settings require update."""
+
+     pass
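
Because ValidationError, MissingTransformSettings, and UpdateTransformSettings now derive from SystemExit, an uncaught one exits a script without a traceback, and a blanket `except Exception` will not swallow it; only `except SystemExit` (or `BaseException`) sees it. A small self-contained illustration of that distinction:

    class UpdateTransformSettings(SystemExit):
        """Transform settings require update."""

    try:
        raise UpdateTransformSettings("please bump your version")
    except Exception:
        print("never reached: SystemExit is not an Exception subclass")
    except SystemExit as exc:
        print(f"caught SystemExit-derived error: {exc}")
    # caught SystemExit-derived error: please bump your version
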
lamindb/core/schema.py CHANGED
@@ -1,5 +1,6 @@
  from typing import Type

+ from django.db.models import ManyToManyField
  from lnschema_core.models import Feature, FeatureSet, LinkORM, Registry


@@ -19,20 +20,22 @@ def dict_schema_name_to_model_name(orm: Type[Registry]) -> dict[str, Registry]:
      return d


- def dict_related_model_to_related_name(orm: Type[Registry]) -> dict[str, str]:
+ def dict_related_model_to_related_name(
+     orm: Type[Registry], links: bool = False
+ ) -> dict[str, str]:
+     def include(model: Registry):
+         return not links != issubclass(model, LinkORM)
+
+     related_objects = orm._meta.related_objects + orm._meta.many_to_many
      d: dict = {
-         i.related_model.__get_name_with_schema__(): i.related_name
-         for i in orm._meta.related_objects
-         if (i.name is not None and not issubclass(i.related_model, LinkORM))
+         record.related_model.__get_name_with_schema__(): (
+             record.related_name
+             if not isinstance(record, ManyToManyField)
+             else record.name
+         )
+         for record in related_objects
+         if (record.name is not None and include(record.related_model))
      }
-     d.update(
-         {
-             i.related_model.__get_name_with_schema__(): i.name
-             for i in orm._meta.many_to_many
-             if (i.name is not None and not issubclass(i.related_model, LinkORM))
-         }
-     )
-
      return d


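
The include helper's `not links != issubclass(model, LinkORM)` is a compact equality test: with links=False only non-link registries pass, with links=True only link models do. A quick standalone check of that truth table:

    def include(links: bool, is_link_orm: bool) -> bool:
        # equivalent to: links == is_link_orm
        return not links != is_link_orm

    for links in (False, True):
        for is_link_orm in (False, True):
            assert include(links, is_link_orm) == (links == is_link_orm)
            print(f"links={links!s:5} is_link_orm={is_link_orm!s:5} -> {include(links, is_link_orm)}")
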
@@ -106,12 +106,17 @@ def read_adata_h5ad(filepath, **kwargs) -> ad.AnnData:
      return adata


- def store_file_or_folder(local_path: UPathStr, storage_path: UPath) -> None:
+ def store_file_or_folder(
+     local_path: UPathStr, storage_path: UPath, print_progress: bool = True
+ ) -> None:
      """Store file or folder (localpath) at storagepath."""
      local_path = Path(local_path)
      if not isinstance(storage_path, LocalPathClasses):
          # this uploads files and directories
-         storage_path.upload_from(local_path, dir_inplace=True, print_progress=True)
+         create_folder = False if local_path.is_dir() else None
+         storage_path.upload_from(
+             local_path, create_folder=create_folder, print_progress=print_progress
+         )
      else:  # storage path is local
          storage_path.parent.mkdir(parents=True, exist_ok=True)
          if local_path.is_file():
@@ -132,7 +137,9 @@ def delete_storage_using_key(
      delete_storage(filepath)


- def delete_storage(storagepath: Path):
+ def delete_storage(
+     storagepath: Path, raise_file_not_found_error: bool = True
+ ) -> None | str:
      """Delete arbitrary artifact."""
      # TODO is_relative_to is not available in 3.8 and deprecated since 3.12
      # replace with check_path_is_child_of_root but this needs to first be debugged
@@ -156,8 +163,11 @@ def delete_storage(storagepath: Path):
              shutil.rmtree(storagepath)
          else:
              storagepath.rmdir()
-     else:
+     elif raise_file_not_found_error:
          raise FileNotFoundError(f"{storagepath} is not an existing path!")
+     else:
+         logger.warning(f"{storagepath} is not an existing path!")
+         return None


  # tested in lamin-usecases
@@ -1,7 +1,8 @@
  from __future__ import annotations

- from typing import TYPE_CHECKING, Optional, Tuple
+ from typing import TYPE_CHECKING, Literal

+ from lamin_utils import logger
  from lamindb_setup.core.upath import LocalPathClasses, UPath
  from lnschema_core import ids

@@ -9,6 +10,51 @@ if TYPE_CHECKING:
      from lnschema_core.models import IsVersioned


+ def bump_version(
+     version: str,
+     bump_type: str = "minor",
+     behavior: Literal["prompt", "error", "ignore"] = "error",
+ ) -> str:
+     """Bumps the version number by major or minor depending on the bump_type flag.
+
+     Parameters:
+         version (str): The current version in "MAJOR" or "MAJOR.MINOR" format.
+         bump_type (str): The type of version bump, either 'major' or 'minor'.
+
+     Returns:
+         str: The new version string.
+     """
+     try:
+         # Split the version into major and minor parts if possible
+         parts = version.split(".")
+         major = int(parts[0])
+         minor = int(parts[1]) if len(parts) > 1 else 0
+
+         if bump_type == "major":
+             # Bump the major version and reset the minor version
+             new_version = f"{major + 1}"
+         elif bump_type == "minor":
+             # Bump the minor version
+             new_version = f"{major}.{minor + 1}"
+         else:
+             raise ValueError("bump_type must be 'major' or 'minor'")
+
+     except (ValueError, IndexError):
+         if behavior == "prompt":
+             new_version = input(
+                 f"The current version is '{version}' - please type the new version: "
+             )
+         elif behavior == "error":
+             raise ValueError(
+                 "Cannot auto-increment non-integer castable version, please provide"
+                 " manually"
+             ) from None
+         else:
+             logger.warning("could not auto-increment version, fix '?' manually")
+             new_version = "?"
+     return new_version
+
+
  def set_version(version: str | None = None, previous_version: str | None = None):
      """(Auto-) set version.

@@ -22,13 +68,7 @@ def set_version(version: str | None = None, previous_version: str | None = None)
      if version == previous_version:
          raise ValueError(f"Please increment the previous version: '{previous_version}'")
      if version is None and previous_version is not None:
-         try:
-             version = str(int(previous_version) + 1)  # increment version by 1
-         except ValueError:
-             raise ValueError(
-                 "Cannot auto-increment non-integer castable version, please provide"
-                 " manually"
-             ) from None
+         version = bump_version(previous_version, bump_type="major")
      return version


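
bump_version centralizes the increment logic that set_version and update_stem_uid_or_version previously duplicated: integer and MAJOR.MINOR strings are bumped, anything else falls through to the chosen behavior (prompt, error, or a "?" placeholder), and set_version now delegates to it with a major bump by default. A few expected values, assuming the function ships as lamindb.core.versioning.bump_version (module path inferred from the `from .versioning import bump_version` line above):

    # Assumes bump_version is importable from lamindb.core.versioning;
    # adjust the import if the module lives elsewhere.
    from lamindb.core.versioning import bump_version

    assert bump_version("2", bump_type="minor") == "2.1"
    assert bump_version("2", bump_type="major") == "3"
    assert bump_version("2.3", bump_type="minor") == "2.4"
    assert bump_version("2.3", bump_type="major") == "3"      # minor part is reset
    assert bump_version("draft", behavior="ignore") == "?"    # logs a warning instead of raising
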
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: lamindb
- Version: 0.72.1
+ Version: 0.73.0
  Summary: A data framework for biology.
  Author-email: Lamin Labs <open-source@lamin.ai>
  Requires-Python: >=3.8
@@ -9,10 +9,10 @@ Classifier: Programming Language :: Python :: 3.8
  Classifier: Programming Language :: Python :: 3.9
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
- Requires-Dist: lnschema_core==0.67.1
- Requires-Dist: lamindb_setup==0.72.2
+ Requires-Dist: lnschema_core==0.68.0
+ Requires-Dist: lamindb_setup==0.73.0
  Requires-Dist: lamin_utils==0.13.2
- Requires-Dist: lamin_cli==0.13.2
+ Requires-Dist: lamin_cli==0.14.0
  Requires-Dist: rapidfuzz
  Requires-Dist: pyarrow
  Requires-Dist: typing_extensions!=4.6.0
@@ -33,6 +33,7 @@ Requires-Dist: laminci>=0.3 ; extra == "dev"
  Requires-Dist: pytest>=6.0 ; extra == "dev"
  Requires-Dist: coverage ; extra == "dev"
  Requires-Dist: pytest-cov ; extra == "dev"
+ Requires-Dist: mudata ; extra == "dev"
  Requires-Dist: nbproject_test>=0.5.1 ; extra == "dev"
  Requires-Dist: faker-biology ; extra == "dev"
  Requires-Dist: django-schema-graph ; extra == "erdiagram"