lamindb 0.69.10__py3-none-any.whl → 0.70.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
lamindb/__init__.py CHANGED
@@ -40,7 +40,7 @@ Modules & settings:
40
40
 
41
41
  """
42
42
 
43
- __version__ = "0.69.10" # denote a release candidate for 0.1.0 with 0.1rc1
43
+ __version__ = "0.70.1" # denote a release candidate for 0.1.0 with 0.1rc1
44
44
 
45
45
  import os as _os
46
46
 
lamindb/_artifact.py CHANGED
@@ -37,7 +37,7 @@ from lamindb.core.storage import (
37
37
  size_adata,
38
38
  write_to_file,
39
39
  )
40
- from lamindb.core.storage.file import (
40
+ from lamindb.core.storage.paths import (
41
41
  auto_storage_key_from_artifact,
42
42
  auto_storage_key_from_artifact_uid,
43
43
  filepath_from_artifact,
@@ -50,8 +50,8 @@ from .core._data import (
50
50
  save_feature_set_links,
51
51
  save_feature_sets,
52
52
  )
53
- from .core.storage.file import AUTO_KEY_PREFIX
54
- from .core.storage.object import _mudata_is_installed
53
+ from .core.storage.objects import _mudata_is_installed
54
+ from .core.storage.paths import AUTO_KEY_PREFIX
55
55
 
56
56
  if TYPE_CHECKING:
57
57
  from lamindb_setup.core.types import UPathStr
@@ -167,7 +167,7 @@ def process_data(
167
167
  # Alex: I don't understand the line below
168
168
  if path.suffixes == []:
169
169
  path = path.with_suffix(suffix)
170
- if suffix not in {".zarr", ".zrad"}:
170
+ if suffix != ".zarr":
171
171
  write_to_file(data, path)
172
172
  use_existing_storage_key = False
173
173
  else:
@@ -188,11 +188,7 @@ def get_stat_or_artifact(
188
188
  n_objects = None
189
189
  if settings.upon_file_create_skip_size_hash:
190
190
  return None, None, None, n_objects
191
- if (
192
- suffix in {".zarr", ".zrad"}
193
- and memory_rep is not None
194
- and isinstance(memory_rep, AnnData)
195
- ):
191
+ if suffix == ".zarr" and memory_rep is not None and isinstance(memory_rep, AnnData):
196
192
  size = size_adata(memory_rep)
197
193
  return size, None, None, n_objects
198
194
  stat = path.stat() # one network request
@@ -238,7 +234,7 @@ def get_stat_or_artifact(
238
234
  "💡 you can make this error a warning:\n"
239
235
  " ln.settings.upon_artifact_create_if_hash_exists"
240
236
  )
241
- raise RuntimeError(f"{msg}\n{hint}")
237
+ raise FileExistsError(f"{msg}\n{hint}")
242
238
  elif settings.upon_artifact_create_if_hash_exists == "warn_create_new":
243
239
  logger.warning(
244
240
  "creating new Artifact object despite existing artifact with same hash:"
@@ -246,10 +242,12 @@ def get_stat_or_artifact(
246
242
  )
247
243
  return size, hash, hash_type, n_objects
248
244
  else:
249
- from_trash = "(from trash)" if result[0].visibility == -1 else ""
250
- logger.warning(
251
- f"returning existing artifact with same hash{from_trash}: {result[0]}"
252
- )
245
+ if result[0].visibility == -1:
246
+ raise FileExistsError(
247
+ f"You're trying to re-create this artifact in trash: {result[0]}"
248
+ "Either permanently delete it with `artifact.delete(permanent=True)` or restore it with `artifact.restore()`"
249
+ )
250
+ logger.warning(f"returning existing artifact with same hash: {result[0]}")
253
251
  return result[0]
254
252
  else:
255
253
  return size, hash, hash_type, n_objects
@@ -331,6 +329,15 @@ def get_artifact_kwargs_from_data(
331
329
  using_key=using_key,
332
330
  )
333
331
  if isinstance(stat_or_artifact, Artifact):
332
+ # update the run of the existing artifact
333
+ if run is not None:
334
+ # save the information that this artifact was previously
335
+ # produced by another run
336
+ if stat_or_artifact.run is not None:
337
+ stat_or_artifact.run.replicated_output_artifacts.add(stat_or_artifact)
338
+ # update the run of the artifact with the latest run
339
+ stat_or_artifact.run = run
340
+ stat_or_artifact.transform = run.transform
334
341
  return stat_or_artifact, None
335
342
  else:
336
343
  size, hash, hash_type, n_objects = stat_or_artifact
@@ -439,7 +446,12 @@ def data_is_anndata(data: AnnData | UPathStr):
439
446
  if isinstance(data, AnnData):
440
447
  return True
441
448
  if isinstance(data, (str, Path, UPath)):
442
- return Path(data).suffix in {".h5ad", ".zrad"}
449
+ if Path(data).suffix == ".h5ad":
450
+ return True
451
+ elif Path(data).suffix == ".zarr":
452
+ raise NotImplementedError(
453
+ "auto-detecting AnnData from Zarr is not yet supported"
454
+ )
443
455
  return False
444
456
 
445
457
 
@@ -821,7 +833,7 @@ def replace(
821
833
 
822
834
  # docstring handled through attach_func_to_class_method
823
835
  def backed(self, is_run_input: bool | None = None) -> AnnDataAccessor | BackedAccessor:
824
- suffixes = (".h5", ".hdf5", ".h5ad", ".zrad", ".zarr")
836
+ suffixes = (".h5", ".hdf5", ".h5ad", ".zarr")
825
837
  if self.suffix not in suffixes:
826
838
  raise ValueError(
827
839
  "Artifact should have a zarr or h5 object as the underlying data, please"
@@ -854,7 +866,7 @@ def load(self, is_run_input: bool | None = None, stream: bool = False, **kwargs)
854
866
 
855
867
 
856
868
  # docstring handled through attach_func_to_class_method
857
- def stage(self, is_run_input: bool | None = None) -> Path:
869
+ def cache(self, is_run_input: bool | None = None) -> Path:
858
870
  _track_run_input(self, is_run_input)
859
871
 
860
872
  using_key = settings._using_key
@@ -932,10 +944,10 @@ def _delete_skip_storage(artifact, *args, **kwargs) -> None:
932
944
 
933
945
 
934
946
  # docstring handled through attach_func_to_class_method
935
- def save(self, *args, **kwargs) -> None:
947
+ def save(self, upload: bool | None = None, **kwargs) -> None:
936
948
  access_token = kwargs.pop("access_token", None)
937
949
 
938
- self._save_skip_storage(*args, **kwargs)
950
+ self._save_skip_storage(**kwargs)
939
951
 
940
952
  from lamindb._save import check_and_attempt_clearing, check_and_attempt_upload
941
953
 
@@ -951,9 +963,9 @@ def save(self, *args, **kwargs) -> None:
951
963
  raise RuntimeError(exception)
952
964
 
953
965
 
954
- def _save_skip_storage(file, *args, **kwargs) -> None:
966
+ def _save_skip_storage(file, **kwargs) -> None:
955
967
  save_feature_sets(file)
956
- super(Artifact, file).save(*args, **kwargs)
968
+ super(Artifact, file).save(**kwargs)
957
969
  save_feature_set_links(file)
958
970
 
959
971
 
@@ -998,7 +1010,7 @@ METHOD_NAMES = [
998
1010
  "from_df",
999
1011
  "from_mudata",
1000
1012
  "backed",
1001
- "stage",
1013
+ "cache",
1002
1014
  "load",
1003
1015
  "delete",
1004
1016
  "save",
@@ -1024,5 +1036,6 @@ for name in METHOD_NAMES:
1024
1036
  Artifact._delete_skip_storage = _delete_skip_storage
1025
1037
  Artifact._save_skip_storage = _save_skip_storage
1026
1038
  Artifact.path = path
1039
+ Artifact.stage = cache
1027
1040
  # this seems a Django-generated function
1028
1041
  delattr(Artifact, "get_visibility_display")
lamindb/_can_validate.py CHANGED
@@ -30,6 +30,7 @@ def inspect(
30
30
  *,
31
31
  mute: bool = False,
32
32
  organism: str | Registry | None = None,
33
+ public_source: Registry | None = None,
33
34
  ) -> InspectResult:
34
35
  """{}."""
35
36
  return _inspect(
@@ -38,6 +39,7 @@ def inspect(
38
39
  field=field,
39
40
  mute=mute,
40
41
  organism=organism,
42
+ public_source=public_source,
41
43
  )
42
44
 
43
45
 
@@ -63,6 +65,7 @@ def _inspect(
63
65
  mute: bool = False,
64
66
  using_key: str | None = None,
65
67
  organism: str | Registry | None = None,
68
+ public_source: Registry | None = None,
66
69
  ) -> pd.DataFrame | dict[str, list[str]]:
67
70
  """{}."""
68
71
  from lamin_utils._inspect import inspect
@@ -86,9 +89,9 @@ def _inspect(
86
89
 
87
90
  if len(nonval) > 0 and orm.__get_schema_name__() == "bionty":
88
91
  try:
89
- bionty_result = orm.public(organism=organism).inspect(
90
- values=nonval, field=field, mute=True
91
- )
92
+ bionty_result = orm.public(
93
+ organism=organism, public_source=public_source
94
+ ).inspect(values=nonval, field=field, mute=True)
92
95
  bionty_validated = bionty_result.validated
93
96
  bionty_mapper = bionty_result.synonyms_mapper
94
97
  hint = False
lamindb/_collection.py CHANGED
@@ -116,6 +116,17 @@ def __init__(
116
116
  logger.warning(
117
117
  f"returning existing collection with same hash: {existing_collection}"
118
118
  )
119
+ # update the run of the existing artifact
120
+ if run is not None:
121
+ # save the information that this artifact was previously
122
+ # produced by another run
123
+ if existing_collection.run is not None:
124
+ existing_collection.run.replicated_output_collections.add(
125
+ existing_collection
126
+ )
127
+ # update the run of the artifact with the latest run
128
+ existing_collection.run = run
129
+ existing_collection.transform = run.transform
119
130
  init_self_from_db(collection, existing_collection)
120
131
  for slot, feature_set in collection.features._feature_set_by_slot.items():
121
132
  if slot in feature_sets:
@@ -223,11 +234,11 @@ def mapped(
223
234
  ) -> MappedCollection:
224
235
  path_list = []
225
236
  for artifact in self.artifacts.all():
226
- if artifact.suffix not in {".h5ad", ".zrad", ".zarr"}:
237
+ if artifact.suffix not in {".h5ad", ".zarr"}:
227
238
  logger.warning(f"Ignoring artifact with suffix {artifact.suffix}")
228
239
  continue
229
240
  elif not stream:
230
- path_list.append(artifact.stage())
241
+ path_list.append(artifact.cache())
231
242
  else:
232
243
  path_list.append(artifact.path)
233
244
  ds = MappedCollection(
@@ -248,11 +259,11 @@ def mapped(
248
259
 
249
260
 
250
261
  # docstring handled through attach_func_to_class_method
251
- def stage(self, is_run_input: bool | None = None) -> list[UPath]:
262
+ def cache(self, is_run_input: bool | None = None) -> list[UPath]:
252
263
  _track_run_input(self, is_run_input)
253
264
  path_list = []
254
265
  for artifact in self.artifacts.all():
255
- path_list.append(artifact.stage())
266
+ path_list.append(artifact.cache())
256
267
  return path_list
257
268
 
258
269
 
@@ -355,7 +366,7 @@ def artifacts(self) -> QuerySet:
355
366
  METHOD_NAMES = [
356
367
  "__init__",
357
368
  "mapped",
358
- "stage",
369
+ "cache",
359
370
  "load",
360
371
  "delete",
361
372
  "save",
@@ -377,3 +388,4 @@ for name in METHOD_NAMES:
377
388
  # this seems a Django-generated function
378
389
  delattr(Collection, "get_visibility_display")
379
390
  Collection.artifacts = artifacts
391
+ Collection.stage = cache
lamindb/_finish.py CHANGED
@@ -43,39 +43,35 @@ def finish(i_saved_the_notebook: bool = False):
43
43
  "Please pass `i_saved_the_notebook=True` to `ln.finish()`, save the notebook, and re-run this cell."
44
44
  )
45
45
  return None
46
- notebook_content = read_notebook(run_context.path) # type: ignore
47
- if not check_last_cell(notebook_content, "i_saved_the_notebook"):
48
- raise CallFinishInLastCell(
49
- "Can only run `ln.finish(i_saved_the_notebook=True)` from the last code cell of the notebook."
50
- )
51
46
  save_run_context_core(
52
47
  run=run_context.run,
53
48
  transform=run_context.transform,
54
49
  filepath=run_context.path,
55
50
  finished_at=True,
56
- notebook_content=notebook_content,
57
51
  )
58
52
  else:
59
53
  # scripts
54
+ # save_run_context_core was already called during ln.track()
60
55
  run_context.run.finished_at = datetime.now(timezone.utc) # update run time
61
56
  run_context.run.save()
62
57
 
63
58
 
64
- # do not type because we need to be aware of lnschema_core import order
65
59
  def save_run_context_core(
66
60
  *,
67
61
  run: Run,
68
62
  transform: Transform,
69
63
  filepath: Path,
70
64
  transform_family: QuerySet | None = None,
71
- is_consecutive: bool = True,
72
65
  finished_at: bool = False,
73
- notebook_content=None, # nbproject.Notebook
74
66
  ) -> str | None:
75
67
  import lamindb as ln
76
68
 
77
69
  ln.settings.verbosity = "success"
78
70
 
71
+ # for scripts, things are easy
72
+ is_consecutive = True
73
+ source_code_path = filepath
74
+ # for notebooks, we need more work
79
75
  if transform.type == TransformType.notebook:
80
76
  try:
81
77
  import nbstripout
@@ -88,62 +84,52 @@ def save_run_context_core(
88
84
  "install nbproject & nbstripout: pip install nbproject nbstripout"
89
85
  )
90
86
  return None
91
- if notebook_content is None:
92
- notebook_content = read_notebook(filepath) # type: ignore
87
+ notebook_content = read_notebook(filepath) # type: ignore
93
88
  is_consecutive = check_consecutiveness(notebook_content)
94
89
  if not is_consecutive:
90
+ msg = " Do you still want to proceed with finishing? (y/n) "
95
91
  if os.getenv("LAMIN_TESTING") is None:
96
- decide = input(
97
- " Do you still want to proceed with publishing? (y/n) "
98
- )
92
+ response = input(msg)
99
93
  else:
100
- decide = "n"
101
- if decide != "y":
102
- logger.error("Aborted (non-consecutive)!")
94
+ response = "n"
95
+ if response != "y":
103
96
  return "aborted-non-consecutive"
104
-
105
97
  # convert the notebook file to html
106
98
  # log_level is set to 40 to silence the nbconvert logging
107
- result = subprocess.run(
99
+ subprocess.run(
108
100
  "jupyter nbconvert --to html"
109
101
  f" {filepath.as_posix()} --Application.log_level=40",
110
102
  shell=True,
103
+ check=True,
111
104
  )
112
105
  # move the temporary file into the cache dir in case it's accidentally
113
106
  # in an existing storage location -> we want to move associated
114
107
  # artifacts into default storage and not register them in an existing
115
108
  # location
116
- filepath_html = filepath.with_suffix(".html") # current location
109
+ filepath_html_orig = filepath.with_suffix(".html") # current location
110
+ filepath_html = ln_setup.settings.storage.cache_dir / filepath_html_orig.name
111
+ # don't use Path.rename here because of cross-device link error
112
+ # https://laminlabs.slack.com/archives/C04A0RMA0SC/p1710259102686969
117
113
  shutil.move(
118
- filepath_html, # type: ignore
119
- ln_setup.settings.storage.cache_dir / filepath_html.name,
120
- ) # move; don't use Path.rename here because of cross-device link error
121
- # see https://laminlabs.slack.com/archives/C04A0RMA0SC/p1710259102686969
122
- filepath_html = (
123
- ln_setup.settings.storage.cache_dir / filepath_html.name
124
- ) # adjust location
125
- assert result.returncode == 0
126
- # copy the notebook file to a temporary file
114
+ filepath_html_orig, # type: ignore
115
+ filepath_html,
116
+ )
117
+ # strip the output from the notebook to create the source code file
118
+ # first, copy the notebook file to a temporary file in the cache
127
119
  source_code_path = ln_setup.settings.storage.cache_dir / filepath.name
128
120
  shutil.copy2(filepath, source_code_path) # copy
129
- result = subprocess.run(f"nbstripout {source_code_path}", shell=True)
130
- assert result.returncode == 0
131
- else:
132
- source_code_path = filepath
121
+ subprocess.run(f"nbstripout {source_code_path}", shell=True, check=True)
133
122
  # find initial versions of source codes and html reports
134
- initial_report = None
135
- initial_source = None
123
+ prev_report = None
124
+ prev_source = None
136
125
  if transform_family is None:
137
126
  transform_family = transform.versions
138
127
  if len(transform_family) > 0:
139
128
  for prev_transform in transform_family.order_by("-created_at"):
140
- # check for id to avoid query
141
129
  if prev_transform.latest_report_id is not None:
142
- # any previous latest report of this transform is OK!
143
- initial_report = prev_transform.latest_report
130
+ prev_report = prev_transform.latest_report
144
131
  if prev_transform.source_code_id is not None:
145
- # any previous source code id is OK!
146
- initial_source = prev_transform.source_code
132
+ prev_source = prev_transform.source_code
147
133
  ln.settings.silence_file_run_transform_warning = True
148
134
  # register the source code
149
135
  if transform.source_code is not None:
@@ -173,7 +159,7 @@ def save_run_context_core(
173
159
  source_code_path,
174
160
  description=f"Source of transform {transform.uid}",
175
161
  version=transform.version,
176
- is_new_version_of=initial_source,
162
+ is_new_version_of=prev_source,
177
163
  visibility=0, # hidden file
178
164
  run=False,
179
165
  )
@@ -207,7 +193,7 @@ def save_run_context_core(
207
193
  report_file = ln.Artifact(
208
194
  filepath_html,
209
195
  description=f"Report of run {run.uid}",
210
- is_new_version_of=initial_report,
196
+ is_new_version_of=prev_report,
211
197
  visibility=0, # hidden file
212
198
  run=False,
213
199
  )
lamindb/_from_values.py CHANGED
@@ -21,6 +21,7 @@ def get_or_create_records(
21
21
  from_public: bool = False,
22
22
  organism: Registry | str | None = None,
23
23
  public_source: Registry | None = None,
24
+ mute: bool = False,
24
25
  ) -> list[Registry]:
25
26
  """Get or create records from iterables."""
26
27
  upon_create_search_names = settings.upon_create_search_names
@@ -38,14 +39,18 @@ def get_or_create_records(
38
39
 
39
40
  # returns existing records & non-existing values
40
41
  records, nonexist_values, msg = get_existing_records(
41
- iterable_idx=iterable_idx, field=field, **kwargs
42
+ iterable_idx=iterable_idx, field=field, mute=mute, **kwargs
42
43
  )
43
44
 
44
45
  # new records to be created based on new values
45
46
  if len(nonexist_values) > 0:
46
47
  if from_public:
47
48
  records_bionty, unmapped_values = create_records_from_public(
48
- iterable_idx=nonexist_values, field=field, msg=msg, **kwargs
49
+ iterable_idx=nonexist_values,
50
+ field=field,
51
+ msg=msg,
52
+ mute=mute,
53
+ **kwargs,
49
54
  )
50
55
  if len(records_bionty) > 0:
51
56
  msg = ""
@@ -56,16 +61,17 @@ def get_or_create_records(
56
61
  unmapped_values = nonexist_values
57
62
  # unmapped new_ids will NOT create records
58
63
  if len(unmapped_values) > 0:
59
- if len(msg) > 0:
64
+ if len(msg) > 0 and not mute:
60
65
  logger.success(msg)
61
66
  s = "" if len(unmapped_values) == 1 else "s"
62
67
  print_values = colors.yellow(_print_values(unmapped_values))
63
68
  name = Registry.__name__
64
69
  n_nonval = colors.yellow(f"{len(unmapped_values)} non-validated")
65
- logger.warning(
66
- f"{colors.red('did not create')} {name} record{s} for "
67
- f"{n_nonval} {colors.italic(f'{field.field.name}{s}')}: {print_values}"
68
- )
70
+ if not mute:
71
+ logger.warning(
72
+ f"{colors.red('did not create')} {name} record{s} for "
73
+ f"{n_nonval} {colors.italic(f'{field.field.name}{s}')}: {print_values}"
74
+ )
69
75
  if Registry.__module__.startswith("lnschema_bionty.") or Registry == ULabel:
70
76
  if isinstance(iterable, pd.Series):
71
77
  feature = iterable.name
@@ -85,6 +91,7 @@ def get_or_create_records(
85
91
  def get_existing_records(
86
92
  iterable_idx: pd.Index,
87
93
  field: StrField,
94
+ mute: bool = False,
88
95
  **kwargs,
89
96
  ):
90
97
  model = field.field.model
@@ -96,7 +103,11 @@ def get_existing_records(
96
103
  # standardize based on the DB reference
97
104
  # log synonyms mapped terms
98
105
  result = model.inspect(
99
- iterable_idx, field=field, organism=kwargs.get("organism"), mute=True
106
+ iterable_idx,
107
+ field=field,
108
+ organism=kwargs.get("organism"),
109
+ public_source=kwargs.get("public_source"),
110
+ mute=True,
100
111
  )
101
112
  syn_mapper = result.synonyms_mapper
102
113
 
@@ -146,9 +157,10 @@ def get_existing_records(
146
157
  # no logging if all values are validated
147
158
  # logs if there are synonyms
148
159
  if len(syn_msg) > 0:
149
- if len(msg) > 0:
160
+ if len(msg) > 0 and not mute:
150
161
  logger.success(msg)
151
- logger.success(syn_msg)
162
+ if not mute:
163
+ logger.success(syn_msg)
152
164
  msg = ""
153
165
 
154
166
  existing_values = iterable_idx.intersection(
@@ -163,6 +175,7 @@ def create_records_from_public(
163
175
  iterable_idx: pd.Index,
164
176
  field: StrField,
165
177
  msg: str = "",
178
+ mute: bool = False,
166
179
  **kwargs,
167
180
  ):
168
181
  model = field.field.model
@@ -219,19 +232,20 @@ def create_records_from_public(
219
232
  s = "" if len(validated) == 1 else "s"
220
233
  print_values = colors.purple(_print_values(validated))
221
234
  # this is the success msg for existing records in the DB
222
- if len(msg) > 0:
235
+ if len(msg) > 0 and not mute:
223
236
  logger.success(msg)
224
- logger.success(
225
- "created"
226
- f" {colors.purple(f'{len(validated)} {model.__name__} record{s} from Bionty')}"
227
- f" matching {colors.italic(f'{field.field.name}')}: {print_values}"
228
- )
237
+ if not mute:
238
+ logger.success(
239
+ "created"
240
+ f" {colors.purple(f'{len(validated)} {model.__name__} record{s} from Bionty')}"
241
+ f" matching {colors.italic(f'{field.field.name}')}: {print_values}"
242
+ )
229
243
 
230
244
  # make sure that synonyms logging appears after the field logging
231
- if len(msg_syn) > 0:
245
+ if len(msg_syn) > 0 and not mute:
232
246
  logger.success(msg_syn)
233
247
  # warning about multi matches
234
- if len(multi_msg) > 0:
248
+ if len(multi_msg) > 0 and not mute:
235
249
  logger.warning(multi_msg)
236
250
 
237
251
  # return the values that are not found in the bionty reference
@@ -247,7 +261,8 @@ def index_iterable(iterable: Iterable) -> pd.Index:
247
261
 
248
262
 
249
263
  def _print_values(names: list, n: int = 20) -> str:
250
- print_values = ", ".join([f"'{name}'" for name in names[:n]])
264
+ names = list(set(names))
265
+ print_values = ", ".join([f"'{name}'" for name in names[:n] if name != "None"])
251
266
  if len(names) > n:
252
267
  print_values += ", ..."
253
268
  return print_values
lamindb/_registry.py CHANGED
@@ -134,6 +134,7 @@ def from_values(
134
134
  field: StrField | None = None,
135
135
  organism: Registry | str | None = None,
136
136
  public_source: Registry | None = None,
137
+ mute: bool = False,
137
138
  ) -> list[Registry]:
138
139
  """{}."""
139
140
  from_public = True if cls.__module__.startswith("lnschema_bionty.") else False
@@ -144,6 +145,7 @@ def from_values(
144
145
  from_public=from_public,
145
146
  organism=organism,
146
147
  public_source=public_source,
148
+ mute=mute,
147
149
  )
148
150
 
149
151
 
lamindb/_save.py CHANGED
@@ -16,11 +16,11 @@ from lamindb_setup.core.upath import UPath, print_hook
16
16
  from lnschema_core.models import Artifact, Registry
17
17
 
18
18
  from lamindb.core._settings import settings
19
- from lamindb.core.storage.file import (
19
+ from lamindb.core.storage.paths import (
20
20
  attempt_accessing_path,
21
21
  auto_storage_key_from_artifact,
22
22
  delete_storage_using_key,
23
- store_artifact,
23
+ store_file_or_folder,
24
24
  )
25
25
 
26
26
  try:
@@ -286,7 +286,7 @@ def upload_artifact(
286
286
  )
287
287
  msg = f"storing artifact '{artifact.uid}' at '{storage_path}'"
288
288
  if (
289
- artifact.suffix in {".zarr", ".zrad"}
289
+ artifact.suffix == ".zarr"
290
290
  and hasattr(artifact, "_memory_rep")
291
291
  and artifact._memory_rep is not None
292
292
  ):
@@ -295,6 +295,5 @@ def upload_artifact(
295
295
  write_adata_zarr(artifact._memory_rep, storage_path, callback=print_progress)
296
296
  elif hasattr(artifact, "_to_store") and artifact._to_store:
297
297
  logger.save(msg)
298
- store_artifact(artifact._local_filepath, storage_path)
299
-
298
+ store_file_or_folder(artifact._local_filepath, storage_path)
300
299
  return storage_path
lamindb/core/_data.py CHANGED
@@ -94,6 +94,23 @@ def save_feature_set_links(self: Artifact | Collection) -> None:
94
94
  bulk_create(links, ignore_conflicts=True)
95
95
 
96
96
 
97
+ def format_repr(value: Registry, exclude: list[str] | str | None = None) -> str:
98
+ if isinstance(exclude, str):
99
+ exclude = [exclude]
100
+ exclude_fields = set() if exclude is None else set(exclude)
101
+ exclude_fields.update(["created_at", "updated_at"])
102
+
103
+ fields = [
104
+ f
105
+ for f in value.__repr__(include_foreign_keys=False).split(", ")
106
+ if not any(f"{excluded_field}=" in f for excluded_field in exclude_fields)
107
+ ]
108
+ repr = ", ".join(fields)
109
+ if not repr.endswith(")"):
110
+ repr += ")"
111
+ return repr
112
+
113
+
97
114
  @doc_args(Data.describe.__doc__)
98
115
  def describe(self: Data):
99
116
  """{}."""
@@ -117,7 +134,7 @@ def describe(self: Data):
117
134
  msg += f"{colors.green('Provenance')}:\n "
118
135
  related_msg = "".join(
119
136
  [
120
- f"📎 {field}: {self.__getattribute__(field)}\n "
137
+ f"📎 {field}: {format_repr(self.__getattribute__(field))}\n "
121
138
  for field in foreign_key_fields
122
139
  if self.__getattribute__(field) is not None
123
140
  ]
@@ -91,6 +91,8 @@ def get_feature_set_links(host: Artifact | Collection) -> QuerySet:
91
91
  def print_features(self: Data) -> str:
92
92
  from lamindb._from_values import _print_values
93
93
 
94
+ from ._data import format_repr
95
+
94
96
  msg = ""
95
97
  features_lookup = Feature.objects.using(self._state.db).lookup().dict()
96
98
  for slot, feature_set in self.features._feature_set_by_slot.items():
@@ -98,12 +100,16 @@ def print_features(self: Data) -> str:
98
100
  features = feature_set.members
99
101
  name_field = get_default_str_field(features[0])
100
102
  feature_names = [getattr(feature, name_field) for feature in features]
101
- msg += f" {colors.bold(slot)}: {feature_set}\n"
103
+ msg += (
104
+ f" {colors.bold(slot)}: {format_repr(feature_set, exclude='hash')}\n"
105
+ )
102
106
  print_values = _print_values(feature_names, n=20)
103
107
  msg += f" {print_values}\n"
104
108
  else:
105
109
  df_slot = feature_set.features.df()
106
- msg += f" {colors.bold(slot)}: {feature_set}\n"
110
+ msg += (
111
+ f" {colors.bold(slot)}: {format_repr(feature_set, exclude='hash')}\n"
112
+ )
107
113
  for _, row in df_slot.iterrows():
108
114
  if row["type"] == "category" and row["registries"] is not None:
109
115
  labels = self.labels.get(
@@ -360,7 +360,6 @@ class run_context:
360
360
  run=cls.run,
361
361
  transform=cls.transform,
362
362
  filepath=cls.path,
363
- is_consecutive=True,
364
363
  )
365
364
  return None
366
365
 
lamindb/core/_settings.py CHANGED
@@ -64,7 +64,7 @@ class Settings:
64
64
  FAQ: :doc:`/faq/idempotency`
65
65
  """
66
66
  track_run_inputs: bool = True
67
- """Track files as input upon `.load()`, `.stage()` and `.backed()`.
67
+ """Track files as input upon `.load()`, `.cache()` and `.backed()`.
68
68
 
69
69
  Requires a global run context with :func:`~lamindb.track` was created!
70
70
 
@@ -299,7 +299,7 @@ def anndata_human_immune_cells(
299
299
  adata = sc.read('Global.h5ad')
300
300
  adata.obs = adata.obs[['donor_id', 'tissue', 'cell_type', 'assay', 'tissue_ontology_term_id', 'cell_type_ontology_term_id', 'assay_ontology_term_id']].copy()
301
301
  sc.pp.subsample(adata, fraction=0.005)
302
- del adata.uns["development_stage_ontology_term_id_colors"]
302
+ del adata.uns["development_stage_ontology_term_id_colors"]
303
303
  del adata.uns["sex_ontology_term_id_colors"]
304
304
  adata.write('human_immune.h5ad')
305
305
  """
@@ -10,5 +10,5 @@ from lamindb_setup.core.upath import LocalPathClasses, UPath, infer_filesystem
10
10
 
11
11
  from ._anndata_sizes import size_adata
12
12
  from ._backed_access import AnnDataAccessor, BackedAccessor
13
- from .file import delete_storage, load_to_memory
14
- from .object import infer_suffix, write_to_file
13
+ from .objects import infer_suffix, write_to_file
14
+ from .paths import delete_storage, load_to_memory
@@ -22,7 +22,7 @@ from lamindb_setup.core.upath import UPath, create_mapper, infer_filesystem
22
22
  from lnschema_core import Artifact
23
23
  from packaging import version
24
24
 
25
- from lamindb.core.storage.file import filepath_from_artifact
25
+ from lamindb.core.storage.paths import filepath_from_artifact
26
26
 
27
27
  if TYPE_CHECKING:
28
28
  from pathlib import Path
@@ -743,15 +743,15 @@ def backed_access(
743
743
 
744
744
  if filepath.suffix in (".h5", ".hdf5", ".h5ad"):
745
745
  conn, storage = registry.open("h5py", filepath)
746
- elif filepath.suffix in (".zarr", ".zrad"):
746
+ elif filepath.suffix == ".zarr":
747
747
  conn, storage = registry.open("zarr", filepath)
748
748
  else:
749
749
  raise ValueError(
750
- "file should have .h5, .hdf5, .h5ad, .zarr or .zrad suffix, not"
750
+ "file should have .h5, .hdf5, .h5ad, .zarr suffix, not"
751
751
  f" {filepath.suffix}."
752
752
  )
753
753
 
754
- if filepath.suffix in (".h5ad", ".zrad"):
754
+ if filepath.suffix in (".h5ad", ".zarr"):
755
755
  return AnnDataAccessor(conn, storage, name)
756
756
  else:
757
757
  if get_spec(storage).encoding_type == "anndata":
@@ -21,11 +21,10 @@ def infer_suffix(dmem, adata_format: str | None = None):
21
21
  """Infer LaminDB storage file suffix from a data object."""
22
22
  if isinstance(dmem, AnnData):
23
23
  if adata_format is not None:
24
- # below should be zrad, not zarr
25
- if adata_format not in ("h5ad", "zarr", "zrad"):
24
+ if adata_format not in ("h5ad", "zarr"):
26
25
  raise ValueError(
27
26
  "Error when specifying AnnData storage format, it should be"
28
- f" 'h5ad', 'zarr' or 'zrad', not '{adata_format}'. Check 'format'"
27
+ f" 'h5ad', 'zarr', not '{adata_format}'. Check 'format'"
29
28
  " or the suffix of 'key'."
30
29
  )
31
30
  return "." + adata_format
@@ -109,23 +109,23 @@ def read_adata_h5ad(filepath, **kwargs) -> ad.AnnData:
109
109
  return adata
110
110
 
111
111
 
112
- def store_artifact(localpath: UPathStr, storagepath: UPath) -> None:
113
- """Store directory or file to configured storage location."""
114
- localpath = Path(localpath)
115
- if not isinstance(storagepath, LocalPathClasses):
112
+ def store_file_or_folder(local_path: UPathStr, storage_path: UPath) -> None:
113
+ """Store file or folder (local_path) at storage_path."""
114
+ local_path = Path(local_path)
115
+ if not isinstance(storage_path, LocalPathClasses):
116
116
  # this uploads files and directories
117
- storagepath.upload_from(localpath, recursive=True, print_progress=True)
117
+ storage_path.upload_from(local_path, recursive=True, print_progress=True)
118
118
  else: # storage path is local
119
- storagepath.parent.mkdir(parents=True, exist_ok=True)
120
- if localpath.is_file():
119
+ storage_path.parent.mkdir(parents=True, exist_ok=True)
120
+ if local_path.is_file():
121
121
  try:
122
- shutil.copyfile(localpath, storagepath)
122
+ shutil.copyfile(local_path, storage_path)
123
123
  except shutil.SameFileError:
124
124
  pass
125
125
  else:
126
- if storagepath.exists():
127
- shutil.rmtree(storagepath)
128
- shutil.copytree(localpath, storagepath)
126
+ if storage_path.exists():
127
+ shutil.rmtree(storage_path)
128
+ shutil.copytree(local_path, storage_path)
129
129
 
130
130
 
131
131
  def delete_storage_using_key(
@@ -212,7 +212,7 @@ def load_to_memory(filepath: UPathStr, stream: bool = False, **kwargs):
212
212
  """
213
213
  filepath = create_path(filepath)
214
214
 
215
- if filepath.suffix not in {".h5ad", ".zarr", ".zrad"}:
215
+ if filepath.suffix not in {".h5ad", ".zarr"}:
216
216
  stream = False
217
217
 
218
218
  if not stream:
@@ -229,7 +229,6 @@ def load_to_memory(filepath: UPathStr, stream: bool = False, **kwargs):
229
229
  ".parquet": pd.read_parquet,
230
230
  ".fcs": read_fcs,
231
231
  ".zarr": read_adata_zarr,
232
- ".zrad": read_adata_zarr,
233
232
  ".html": load_html,
234
233
  ".json": load_json,
235
234
  ".h5mu": read_mdata_h5mu,
@@ -2,39 +2,48 @@ from __future__ import annotations
2
2
 
3
3
  import json
4
4
  from datetime import datetime, timezone
5
+ from typing import TYPE_CHECKING
5
6
 
6
7
  import lamindb_setup as ln_setup
7
8
  from lamin_utils import logger
8
9
 
9
10
  from lamindb._artifact import Artifact
10
11
 
12
+ if TYPE_CHECKING:
13
+ from vitessce import VitessceConfig
14
+
11
15
 
12
- # tested in lamin-spatial
13
- # can't type vitessce_config because can't assume it's installed
14
- def save_vitessce_config(vitessce_config, description: str) -> Artifact:
16
+ # tested & context in https://github.com/laminlabs/lamin-spatial
17
+ def save_vitessce_config(vitessce_config: VitessceConfig, description: str) -> Artifact:
15
18
  """Takes a ``VitessceConfig`` object and saves it as an artifact.
16
19
 
17
20
  Args:
18
21
  vitessce_config (``VitessceConfig``): A VitessceConfig object.
19
22
  description: A description for the artifact.
20
23
  """
24
+ # can't assume vitessce is installed
21
25
  from vitessce import VitessceConfig
22
26
 
23
- assert isinstance(vitessce_config, VitessceConfig)
27
+ # create a local _data export_ in a folder
24
28
  timestamp = datetime.now(timezone.utc).isoformat().split(".")[0]
25
- vitesse_export = f"./vitessce_export_{timestamp}.vitessce"
26
- vitessce_config.export(to="files", base_url="", out_dir=vitesse_export)
27
- logger.important(f"local export: {vitesse_export}")
28
- artifact = Artifact(vitesse_export, description=description)
29
+ vitesse_export_folder = f"./vitessce_export_{timestamp}.vitessce"
30
+ vitessce_config.export(to="files", base_url="", out_dir=vitesse_export_folder)
31
+ logger.important(f"local export: {vitesse_export_folder}")
32
+ # create an artifact and store the local export in th cloud
33
+ artifact = Artifact(vitesse_export_folder, description=description)
29
34
  artifact.save()
35
+ # create a JSON export that points to the data in the cloud
30
36
  config_dict = vitessce_config.to_dict(base_url=artifact.path.to_url())
31
37
  logger.important(f"base url: {artifact.path.to_url()}")
38
+ # manually place that JSON export into the local data export folder
32
39
  config_filename = "vitessce_config.json"
33
- config_file_local_path = f"{vitesse_export}/{config_filename}"
40
+ config_file_local_path = f"{vitesse_export_folder}/{config_filename}"
34
41
  with open(config_file_local_path, "w") as file:
35
42
  json.dump(config_dict, file)
43
+ # manually place that JSON export into the previously registered artifact folder
36
44
  config_file_path = artifact.path / config_filename
37
45
  config_file_path.upload_from(config_file_local_path)
46
+ # log the the URLs
38
47
  logger.important(f"config url: {config_file_path.to_url()}")
39
48
  slug = ln_setup.settings.instance.slug
40
49
  logger.important(f"go to: https://lamin.ai/{slug}/artifact/{artifact.uid}")
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: lamindb
3
- Version: 0.69.10
3
+ Version: 0.70.1
4
4
  Summary: A data framework for biology.
5
5
  Author-email: Lamin Labs <open-source@lamin.ai>
6
6
  Requires-Python: >=3.8
@@ -9,10 +9,10 @@ Classifier: Programming Language :: Python :: 3.8
9
9
  Classifier: Programming Language :: Python :: 3.9
10
10
  Classifier: Programming Language :: Python :: 3.10
11
11
  Classifier: Programming Language :: Python :: 3.11
12
- Requires-Dist: lnschema_core==0.64.11
13
- Requires-Dist: lamindb_setup==0.69.0
12
+ Requires-Dist: lnschema_core==0.65.1
13
+ Requires-Dist: lamindb_setup==0.69.2
14
14
  Requires-Dist: lamin_utils==0.13.1
15
- Requires-Dist: lamin_cli==0.12.2
15
+ Requires-Dist: lamin_cli==0.12.3
16
16
  Requires-Dist: rapidfuzz
17
17
  Requires-Dist: pyarrow
18
18
  Requires-Dist: typing_extensions!=4.6.0
@@ -27,7 +27,7 @@ Requires-Dist: urllib3<2 ; extra == "aws"
27
27
  Requires-Dist: aiobotocore[boto3]>=2.5.4,<3.0.0 ; extra == "aws"
28
28
  Requires-Dist: s3fs==2023.12.2 ; extra == "aws"
29
29
  Requires-Dist: fsspec[s3]==2023.12.2 ; extra == "aws"
30
- Requires-Dist: bionty==0.42.8 ; extra == "bionty"
30
+ Requires-Dist: bionty==0.42.9 ; extra == "bionty"
31
31
  Requires-Dist: pandas<2 ; extra == "dev"
32
32
  Requires-Dist: pre-commit ; extra == "dev"
33
33
  Requires-Dist: nox ; extra == "dev"
@@ -1,32 +1,32 @@
1
- lamindb/__init__.py,sha256=CV6kzbhp0QdBkY2VOTJYYsswxKzqvzSgqoDE3Bf-cFE,2164
1
+ lamindb/__init__.py,sha256=z8Z1Bsb9A_xPvukQCyt7I2d5w_FsWd27bMZlULQJ7JU,2163
2
2
  lamindb/_annotate.py,sha256=BXYWFATifbfRVmLFIgE4cZ4Fls4lkH2WXAEL2o7v-XM,43035
3
- lamindb/_artifact.py,sha256=OiM6BMz7Vm852QGvyr24QZlXU0dHRID6CF50U0cBVnE,36491
4
- lamindb/_can_validate.py,sha256=qA6Ni9scObcIeYzuudVymgOjQVLLxnGNofBSDNDaqfY,14557
5
- lamindb/_collection.py,sha256=xzya4M_vU8lp0nksqxj2BfSWOTwLTr9Sp6Kwp47so5g,13985
3
+ lamindb/_artifact.py,sha256=BUl_3WYwrZ28P93Pb9AiwriVFBSLvBUEQjYEyYCrkZ0,37307
4
+ lamindb/_can_validate.py,sha256=nvoZG-35n3HofkY4Xc6hBv9AV54_RDan7Hzp5TuqY9I,14709
5
+ lamindb/_collection.py,sha256=wf7ClfiD3vsetts_iSUk4UihWsHq0IdOF8LdIIHS7JU,14536
6
6
  lamindb/_feature.py,sha256=srAKchY7gqD-h-cWlEiAWuHlpFKFwv0PWIA-JX0Go8c,6758
7
7
  lamindb/_feature_set.py,sha256=AzjOcHzQajpeikPOAic-aj0z_C5b7VpHVegg3ThRSLw,9045
8
8
  lamindb/_filter.py,sha256=xnjJzjF3Zj4dK_Kfymvhgczk27MhhXz5ZYc7XINbgHY,1331
9
- lamindb/_finish.py,sha256=4mCoDw24gXeOS_EALuUYIA57OqmcZWIi0Fk5fEDwmCg,8798
10
- lamindb/_from_values.py,sha256=OboM5lTysxjyu-wMWtD_Cw7SWJk8fG36tAQwb6FKM8k,12160
9
+ lamindb/_finish.py,sha256=SIPIIMAXM2d00L6VHMf2qFiOHuTyAlLy-5qRJ-BYaIQ,8107
10
+ lamindb/_from_values.py,sha256=DVXjnQ2wwNw-2bFzy0uXLdVlqoprrn95hTnrXwn-KqM,12638
11
11
  lamindb/_is_versioned.py,sha256=0PgRCmxEmYDcAjllLSOYZm132B1lW6QgmBBERhRyFt0,1341
12
12
  lamindb/_parents.py,sha256=N9T8jbd3eaoHDLE9TD1y1QgGcO81E6Brapy8LILzRCQ,14790
13
13
  lamindb/_query_manager.py,sha256=3zokXqxgj9vTJBnN2sbYKS-q69fyDDPF_aGq_rFHzXU,4066
14
14
  lamindb/_query_set.py,sha256=fy6xMK9MPGbD8D_i5iNzR8XA009W05ud4tbgrzd5-Vg,11287
15
- lamindb/_registry.py,sha256=CL-KlCC23HPWQgSeYg7Od2kbZdNViAjFqqdiGpKJKw8,19293
15
+ lamindb/_registry.py,sha256=-Bv10zSr6IY7QM5pu_35NiVjQDJnBcXRECVe9h7GEuY,19336
16
16
  lamindb/_run.py,sha256=b7A52M1On3QzFgIYyfQoz5Kk7V3wcu9p_Prq5bzd8v8,1838
17
- lamindb/_save.py,sha256=aqvE0ryZs4-sDk6DZPn-Ki724gHeLyA9w-1oN5m_XMU,11425
17
+ lamindb/_save.py,sha256=x16FBwltaTd1tnXm_zCxkvuVxyon6vRtekf37CfepXg,11426
18
18
  lamindb/_storage.py,sha256=VW8xq3VRv58-ciholvOdlcgvp_OIlLxx5GxLt-e2Irs,614
19
19
  lamindb/_transform.py,sha256=rxojJ91qQSkeYDHYbwqjFAYxBMgJd3cq_K7Z0n5g8Aw,3482
20
20
  lamindb/_ulabel.py,sha256=e5dw9h1tR0_u-DMn7Gzx0WhUhV5w7j4v3QbnLWQV7eI,1941
21
21
  lamindb/_utils.py,sha256=LGdiW4k3GClLz65vKAVRkL6Tw-Gkx9DWAdez1jyA5bE,428
22
22
  lamindb/_view.py,sha256=GV1FrqIMmdooEkA-5zvcTWgV1nqx1sehi6WdWEaFpxM,2171
23
23
  lamindb/core/__init__.py,sha256=MB1gEMKUf0GBQrI3dH8WRZOZQmWR4HIojXK_hXXVdqA,1235
24
- lamindb/core/_data.py,sha256=zxsrtf8a3PBzni1KLNVST45dxUgK11K7U6o9pmfK-xU,16905
25
- lamindb/core/_feature_manager.py,sha256=MHEU3I9m9p69TaTz1WTzAsimUan51RhxWV8G--_0BA8,15615
24
+ lamindb/core/_data.py,sha256=En3v29eiJARy5l7nSsttAsDsqDLTZ4-xM8fCNyVzExI,17465
25
+ lamindb/core/_feature_manager.py,sha256=LlYgU71AoTnrseWFCq-oZkUAYWITtRR7BNFm0AhHe-c,15773
26
26
  lamindb/core/_label_manager.py,sha256=0RtegYnK3zIisOnd970EobOrHMpp7OCH-mEoPrPXw2c,9075
27
27
  lamindb/core/_mapped_collection.py,sha256=_OwFZh5SePDUD70XIK5kngv3we_Z5-YdGHNfpUSatSQ,19469
28
- lamindb/core/_run_context.py,sha256=GS593lxHsbQyhbrp6ppzZ0r-fVe6W099NDXM6xyg8-U,17509
29
- lamindb/core/_settings.py,sha256=32109tBsMcwdGlNTWAGR7YBuCVXrrVfzIEKHWVk1bBQ,5727
28
+ lamindb/core/_run_context.py,sha256=xcsPhpabm2a-PnwH-vDJ4Mx4qQLdFsn4ZUpWMWKZgoM,17472
29
+ lamindb/core/_settings.py,sha256=r9si7wJb31tI4vfz9dUN4iXe6QQU7FjnqAEsHy2UDzM,5727
30
30
  lamindb/core/_sync_git.py,sha256=IlTqw55inPp_RZbN_YScaCeKza7LeF9mClQw55W3_d4,3921
31
31
  lamindb/core/_track_environment.py,sha256=xLZ6kgzxWS6MWZ5LQ_wkbJX99vmYOT8iQ-Fz4OHCgWw,754
32
32
  lamindb/core/_transform_settings.py,sha256=eV96QKX9jOojjzF-a0oo0wXQsMXN2F6QV7orE06oFC8,161
@@ -36,19 +36,19 @@ lamindb/core/fields.py,sha256=Jgi_XI-iTe6cT7oD8FV_JqEpjN1Q9rZWwL8VLtj4jkA,164
36
36
  lamindb/core/types.py,sha256=xeQF2x40p2pR9eIVQrXT74RrS810z2fbjmTRTSQUqPM,230
37
37
  lamindb/core/versioning.py,sha256=DsEHpCueNwhRiIaRH5-O8H_1fJVNtWslCRx30YiIS5o,3080
38
38
  lamindb/core/datasets/__init__.py,sha256=zRP98oqUAaXhqWyKMiH0s_ImVIuNeziQQ2kQ_t0f-DI,1353
39
- lamindb/core/datasets/_core.py,sha256=ZPsDETZp-SqQelK5O0l7sDZFjuz8Sy93UbTfSYqP9cc,20191
39
+ lamindb/core/datasets/_core.py,sha256=36vUOYFkX_4hBAnM_BujV5BRARMI5b9iI_SM9qS7wGc,20191
40
40
  lamindb/core/datasets/_fake.py,sha256=BZF9R_1iF0HDnvtZNqL2FtsjSMuqDIfuFxnw_LJYIh4,953
41
- lamindb/core/storage/__init__.py,sha256=9alBNtyH59VnoWJS-IdjLwFKlK-kgeCGl6jXk0_wGeQ,369
41
+ lamindb/core/storage/__init__.py,sha256=zsVbIseNLJfObDl7cuCbULr6rnux_R_N8EVV843XmIU,371
42
42
  lamindb/core/storage/_anndata_sizes.py,sha256=aXO3OB--tF5MChenSsigW6Q-RuE8YJJOUTVukkLrv9A,1029
43
- lamindb/core/storage/_backed_access.py,sha256=uW0zaeKune6k2zYJcLazIA7Xux9TS2tAHDNdz1syYZw,24557
43
+ lamindb/core/storage/_backed_access.py,sha256=DweJdYP8tKEOZbfJ1UKFQgAIzQjsJqdqj8U9y7T0oXo,24538
44
44
  lamindb/core/storage/_zarr.py,sha256=0i9-cJPjieIsp5UpK-IyRPkHAF-iKkWgpkWviSni2MM,2900
45
- lamindb/core/storage/file.py,sha256=AEMbrC6oWoIp3vs4F2bgm50EVBxkCGbN9R4na7Wanaw,7754
46
- lamindb/core/storage/object.py,sha256=37p8CSlfSlWPVuuD3MFcwQRj1n_kovLKVImh9SCR4Eg,1601
45
+ lamindb/core/storage/objects.py,sha256=5LbBeZVKuOOB8DceSE-PN8elKY0N9OhFXZPQJE4lK48,1538
46
+ lamindb/core/storage/paths.py,sha256=XWfSHK5b3_TFiK-IMvH-srvxO0bZStzA_rwjNaTxQU4,7725
47
47
  lamindb/integrations/__init__.py,sha256=aH2PmO2m4-vwIifMYTB0Fyyr_gZWtVnV71jT0tVWSw0,123
48
- lamindb/integrations/_vitessce.py,sha256=xLlFr_GzoY5RjyouPFEO-W7crBlTVQdHR2ns1IcCZFc,1673
48
+ lamindb/integrations/_vitessce.py,sha256=l3GPkzQXcwnMypbm8vDkITjognELjX8ucLaiqy99Jgg,2131
49
49
  lamindb/setup/__init__.py,sha256=OwZpZzPDv5lPPGXZP7-zK6UdO4FHvvuBh439yZvIp3A,410
50
50
  lamindb/setup/core/__init__.py,sha256=SevlVrc2AZWL3uALbE5sopxBnIZPWZ1IB0NBDudiAL8,167
51
- lamindb-0.69.10.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
52
- lamindb-0.69.10.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
53
- lamindb-0.69.10.dist-info/METADATA,sha256=R4DYvAOjBwW0ywfjHvYo4xLUABytQWm6iGg8LE8yfbU,2837
54
- lamindb-0.69.10.dist-info/RECORD,,
51
+ lamindb-0.70.1.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
52
+ lamindb-0.70.1.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
53
+ lamindb-0.70.1.dist-info/METADATA,sha256=qbHYrCM8tdhGUBT-70lupGW7AQ5RVNA7gg9yRTvihwA,2835
54
+ lamindb-0.70.1.dist-info/RECORD,,