datachain 0.30.5__py3-none-any.whl → 0.30.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of datachain might be problematic.

datachain/cli/commands/datasets.py CHANGED
@@ -1,30 +1,41 @@
 import sys
-from typing import TYPE_CHECKING, Optional
+from collections.abc import Iterable, Iterator
+from typing import TYPE_CHECKING, Optional, Union
 
 from tabulate import tabulate
 
-if TYPE_CHECKING:
-    from datachain.catalog import Catalog
-
+from datachain import semver
 from datachain.catalog import is_namespace_local
 from datachain.cli.utils import determine_flavors
 from datachain.config import Config
 from datachain.error import DataChainError, DatasetNotFoundError
 from datachain.studio import list_datasets as list_datasets_studio
 
+if TYPE_CHECKING:
+    from datachain.catalog import Catalog
+
+
+def group_dataset_versions(
+    datasets: Iterable[tuple[str, str]], latest_only=True
+) -> dict[str, Union[str, list[str]]]:
+    grouped: dict[str, list[tuple[int, int, int]]] = {}
 
-def group_dataset_versions(datasets, latest_only=True):
-    grouped = {}
     # Sort to ensure groupby works as expected
     # (groupby expects consecutive items with the same key)
     for name, version in sorted(datasets):
-        grouped.setdefault(name, []).append(version)
+        grouped.setdefault(name, []).append(semver.parse(version))
 
     if latest_only:
         # For each dataset name, pick the highest version.
-        return {name: max(versions) for name, versions in grouped.items()}
+        return {
+            name: semver.create(*(max(versions))) for name, versions in grouped.items()
+        }
+
     # For each dataset name, return a sorted list of unique versions.
-    return {name: sorted(set(versions)) for name, versions in grouped.items()}
+    return {
+        name: [semver.create(*v) for v in sorted(set(versions))]
+        for name, versions in grouped.items()
+    }
 
 
 def list_datasets(
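
The rewritten group_dataset_versions parses versions with semver before comparing, so ordering is numeric rather than lexicographic. A minimal sketch of why that matters, assuming (as the annotations above imply) that semver.parse returns a (major, minor, patch) tuple and semver.create formats one back into a string:

    from datachain import semver

    # Lexicographic max on raw strings picks the wrong "latest" version:
    assert max(["0.9.0", "0.10.0"]) == "0.9.0"

    # Parsing first makes the comparison numeric, as the new code does:
    versions = [semver.parse(v) for v in ["0.9.0", "0.10.0"]]  # [(0, 9, 0), (0, 10, 0)]
    assert semver.create(*max(versions)) == "0.10.0"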
@@ -35,7 +46,7 @@ def list_datasets(
     team: Optional[str] = None,
     latest_only: bool = True,
     name: Optional[str] = None,
-):
+) -> None:
     token = Config().read().get("studio", {}).get("token")
     all, local, studio = determine_flavors(studio, local, all, token)
     if name:
@@ -95,27 +106,31 @@ def list_datasets(
     print(tabulate(rows, headers="keys"))
 
 
-def list_datasets_local(catalog: "Catalog", name: Optional[str] = None):
+def list_datasets_local(
+    catalog: "Catalog", name: Optional[str] = None
+) -> Iterator[tuple[str, str]]:
     if name:
         yield from list_datasets_local_versions(catalog, name)
         return
 
     for d in catalog.ls_datasets():
         for v in d.versions:
-            yield (d.full_name, v.version)
+            yield d.full_name, v.version
 
 
-def list_datasets_local_versions(catalog: "Catalog", name: str):
+def list_datasets_local_versions(
+    catalog: "Catalog", name: str
+) -> Iterator[tuple[str, str]]:
     namespace_name, project_name, name = catalog.get_full_dataset_name(name)
 
     ds = catalog.get_dataset(
         name, namespace_name=namespace_name, project_name=project_name
     )
     for v in ds.versions:
-        yield (name, v.version)
+        yield name, v.version
 
 
-def _datasets_tabulate_row(name, both, local_version, studio_version):
+def _datasets_tabulate_row(name, both, local_version, studio_version) -> dict[str, str]:
     row = {
         "Name": name,
     }
@@ -136,7 +151,7 @@ def rm_dataset(
     force: Optional[bool] = False,
     studio: Optional[bool] = False,
     team: Optional[str] = None,
-):
+) -> None:
     namespace_name, project_name, name = catalog.get_full_dataset_name(name)
 
     if studio:
@@ -166,7 +181,7 @@ def edit_dataset(
     description: Optional[str] = None,
     attrs: Optional[list[str]] = None,
     team: Optional[str] = None,
-):
+) -> None:
     from datachain.lib.dc.utils import is_studio
 
     namespace_name, project_name, name = catalog.get_full_dataset_name(name)
datachain/delta.py CHANGED
@@ -4,7 +4,7 @@ from functools import wraps
 from typing import TYPE_CHECKING, Callable, Optional, TypeVar, Union
 
 import datachain
-from datachain.dataset import DatasetDependency
+from datachain.dataset import DatasetDependency, DatasetRecord
 from datachain.error import DatasetNotFoundError
 from datachain.project import Project
 
@@ -30,9 +30,10 @@ def delta_disabled(
 
     @wraps(method)
     def _inner(self: T, *args: "P.args", **kwargs: "P.kwargs") -> T:
-        if self.delta:
+        if self.delta and not self._delta_unsafe:
             raise NotImplementedError(
-                f"Delta update cannot be used with {method.__name__}"
+                f"Cannot use {method.__name__} with delta datasets - may cause"
+                " inconsistency. Use delta_unsafe flag to allow this operation."
             )
         return method(self, *args, **kwargs)
 
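The relaxed guard means a delta chain now raises only when delta_unsafe is unset. A hedged behavior sketch (the bucket and column names are placeholders; distinct is one of the ops wrapped in delta_disabled, per the docstrings further down):

    import datachain as dc

    strict = dc.read_storage("s3://my-bucket/imgs/", delta=True, delta_on="file.path")
    try:
        strict.distinct("file.path")
    except NotImplementedError as exc:
        print(exc)  # "Cannot use distinct with delta datasets - may cause ..."

    # Opting in with delta_unsafe=True skips the guard; consistency of the
    # resulting dataset becomes the caller's responsibility.
    loose = dc.read_storage(
        "s3://my-bucket/imgs/", delta=True, delta_on="file.path", delta_unsafe=True
    )
    loose.distinct("file.path")  # now allowed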
@@ -124,10 +125,19 @@ def _get_retry_chain(
     # Subtract also diff chain since some items might be picked
     # up by `delta=True` itself (e.g. records got modified AND are missing in the
     # result dataset atm)
-    return retry_chain.subtract(diff_chain, on=on) if retry_chain else None
+    on = [on] if isinstance(on, str) else on
+
+    return (
+        retry_chain.diff(
+            diff_chain, on=on, added=True, same=True, modified=False, deleted=False
+        ).distinct(*on)
+        if retry_chain
+        else None
+    )
 
 
 def _get_source_info(
+    source_ds: DatasetRecord,
     name: str,
     namespace_name: str,
     project_name: str,
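
Swapping subtract for diff in the hunk above changes which retry candidates survive: records the delta diff already re-collects as modified are dropped, errored records untouched by the diff are kept, and distinct then collapses duplicate keys. A toy illustration with hypothetical chains built via read_values (column names are made up):

    import datachain as dc

    # Records that previously errored and are candidates for retry:
    retry_chain = dc.read_values(path=["a.jpg", "b.jpg", "c.jpg"], etag=[1, 1, 1])
    # Records the delta diff is already reprocessing (b.jpg changed upstream):
    diff_chain = dc.read_values(path=["b.jpg"], etag=[2])

    candidates = retry_chain.diff(
        diff_chain,
        on=["path"],
        added=True,      # keep a.jpg, c.jpg: absent from the delta diff
        same=True,       # keep records identical to their diff counterpart
        modified=False,  # drop b.jpg: delta will reprocess it anyway
        deleted=False,   # ignore records present only in diff_chain
    ).distinct("path")   # collapse any duplicate keys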
@@ -154,25 +164,23 @@ def _get_source_info(
         indirect=False,
     )
 
-    dep = dependencies[0]
-    if not dep:
+    source_ds_dep = next((d for d in dependencies if d.name == source_ds.name), None)
+    if not source_ds_dep:
         # Starting dataset was removed, back off to normal dataset creation
         return None, None, None, None, None
 
-    source_ds_project = catalog.metastore.get_project(dep.project, dep.namespace)
-    source_ds_name = dep.name
-    source_ds_version = dep.version
-    source_ds_latest_version = catalog.get_dataset(
-        source_ds_name,
-        namespace_name=source_ds_project.namespace.name,
-        project_name=source_ds_project.name,
-    ).latest_version
+    # Refresh starting dataset to have new versions if they are created
+    source_ds = catalog.get_dataset(
+        source_ds.name,
+        namespace_name=source_ds.project.namespace.name,
+        project_name=source_ds.project.name,
+    )
 
     return (
-        source_ds_name,
-        source_ds_project,
-        source_ds_version,
-        source_ds_latest_version,
+        source_ds.name,
+        source_ds.project,
+        source_ds_dep.version,
+        source_ds.latest_version,
         dependencies,
     )
 
@@ -244,7 +252,14 @@ def delta_retry_update(
         source_ds_version,
         source_ds_latest_version,
         dependencies,
-    ) = _get_source_info(name, namespace_name, project_name, latest_version, catalog)
+    ) = _get_source_info(
+        dc._query.starting_step.dataset,  # type: ignore[union-attr]
+        name,
+        namespace_name,
+        project_name,
+        latest_version,
+        catalog,
+    )
 
     # If source_ds_name is None, starting dataset was removed
     if source_ds_name is None:
@@ -267,8 +282,9 @@ def delta_retry_update(
     if dependencies:
         dependencies = copy(dependencies)
         dependencies = [d for d in dependencies if d is not None]
+        source_ds_dep = next(d for d in dependencies if d.name == source_ds_name)
         # Update to latest version
-        dependencies[0].version = source_ds_latest_version  # type: ignore[union-attr]
+        source_ds_dep.version = source_ds_latest_version  # type: ignore[union-attr]
 
     # Handle retry functionality if enabled
     if delta_retry:
datachain/lib/dc/datachain.py CHANGED
@@ -193,6 +193,7 @@ class DataChain:
         self._setup: dict = setup or {}
         self._sys = _sys
         self._delta = False
+        self._delta_unsafe = False
         self._delta_on: Optional[Union[str, Sequence[str]]] = None
         self._delta_result_on: Optional[Union[str, Sequence[str]]] = None
         self._delta_compare: Optional[Union[str, Sequence[str]]] = None
@@ -216,6 +217,7 @@ class DataChain:
         right_on: Optional[Union[str, Sequence[str]]] = None,
         compare: Optional[Union[str, Sequence[str]]] = None,
         delta_retry: Optional[Union[bool, str]] = None,
+        delta_unsafe: bool = False,
     ) -> "Self":
         """Marks this chain as delta, which means special delta process will be
         called on saving dataset for optimization"""
@@ -226,6 +228,7 @@ class DataChain:
         self._delta_result_on = right_on
         self._delta_compare = compare
         self._delta_retry = delta_retry
+        self._delta_unsafe = delta_unsafe
         return self
 
     @property
@@ -238,6 +241,10 @@ class DataChain:
         """Returns True if this chain is ran in "delta" update mode"""
         return self._delta
 
+    @property
+    def delta_unsafe(self) -> bool:
+        return self._delta_unsafe
+
     @property
     def schema(self) -> dict[str, DataType]:
         """Get schema of the chain."""
@@ -328,6 +335,7 @@ class DataChain:
             right_on=self._delta_result_on,
             compare=self._delta_compare,
             delta_retry=self._delta_retry,
+            delta_unsafe=self._delta_unsafe,
         )
 
         return chain
datachain/lib/dc/datasets.py CHANGED
@@ -40,6 +40,7 @@ def read_dataset(
     delta_result_on: Optional[Union[str, Sequence[str]]] = None,
     delta_compare: Optional[Union[str, Sequence[str]]] = None,
     delta_retry: Optional[Union[bool, str]] = None,
+    delta_unsafe: bool = False,
     update: bool = False,
 ) -> "DataChain":
     """Get data from a saved Dataset. It returns the chain itself.
@@ -80,6 +81,8 @@ def read_dataset(
         update: If True always checks for newer versions available on Studio, even if
             some version of the dataset exists locally already. If False (default), it
             will only fetch the dataset from Studio if it is not found locally.
+        delta_unsafe: Allow restricted ops in delta: merge, agg, union, group_by,
+            distinct.
 
 
     Example:
@@ -205,6 +208,7 @@ def read_dataset(
         right_on=delta_result_on,
         compare=delta_compare,
         delta_retry=delta_retry,
+        delta_unsafe=delta_unsafe,
     )
 
     return chain
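
A hedged usage sketch of the new parameter on read_dataset; the dataset name, key column, and aggregation are illustrative:

    import datachain as dc
    from datachain import func

    images = dc.read_dataset(
        "images",
        delta=True,
        delta_on="file.path",
        delta_unsafe=True,  # permit group_by below, normally blocked in delta mode
    )
    counts = images.group_by(cnt=func.count(), partition_by="file.path")
    counts.save("image_counts")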
datachain/lib/dc/storage.py CHANGED
@@ -43,6 +43,7 @@ def read_storage(
     delta_result_on: Optional[Union[str, Sequence[str]]] = None,
     delta_compare: Optional[Union[str, Sequence[str]]] = None,
     delta_retry: Optional[Union[bool, str]] = None,
+    delta_unsafe: bool = False,
     client_config: Optional[dict] = None,
 ) -> "DataChain":
     """Get data from storage(s) as a list of file with all file attributes.
@@ -77,6 +78,9 @@ def read_storage(
               (error mode)
             - True: Reprocess records missing from the result dataset (missing mode)
             - None: No retry processing (default)
+        delta_unsafe: Allow restricted ops in delta: merge, agg, union, group_by,
+            distinct. Caller must ensure datasets are consistent and not partially
+            updated.
 
     Returns:
         DataChain: A DataChain object containing the file information.
@@ -218,6 +222,7 @@ def read_storage(
         right_on=delta_result_on,
         compare=delta_compare,
         delta_retry=delta_retry,
+        delta_unsafe=delta_unsafe,
     )
 
     return storage_chain
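
The same flag on read_storage composes with the existing delta and retry modes. A sketch, assuming a hypothetical bucket and a second dataset with a matching schema:

    import datachain as dc

    fresh = dc.read_storage(
        "s3://my-bucket/videos/",
        delta=True,            # only process files new since the last save
        delta_on="file.path",
        delta_retry=True,      # missing mode: reprocess records absent downstream
        delta_unsafe=True,     # permit the union below despite delta mode
    )
    archived = dc.read_dataset("archived_videos")  # assumed to share the schema
    fresh.union(archived).save("all_videos")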
datachain-0.30.6.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: datachain
-Version: 0.30.5
+Version: 0.30.6
 Summary: Wrangle unstructured AI data at scale
 Author-email: Dmitry Petrov <support@dvc.org>
 License-Expression: Apache-2.0
datachain-0.30.6.dist-info/RECORD CHANGED
@@ -4,7 +4,7 @@ datachain/asyn.py,sha256=RH_jFwJcTXxhEFomaI9yL6S3Onau6NZ6FSKfKFGtrJE,9689
 datachain/cache.py,sha256=ESVRaCJXEThMIfGEFVHx6wJPOZA7FYk9V6WxjyuqUBY,3626
 datachain/config.py,sha256=g8qbNV0vW2VEKpX-dGZ9pAn0DAz6G2ZFcr7SAV3PoSM,4272
 datachain/dataset.py,sha256=ATGa-CBTFoZeTN2V40-zHEzfMBcdYK0WuoJ6H2yEAvo,25268
-datachain/delta.py,sha256=dghGvD44LcglvL5-kUOIKk75ywBO0U7eikA3twKZC28,10202
+datachain/delta.py,sha256=X5Lw6GQ8MAYNl2YIExNvl0tPIkylQEWwnCw0We7NtHM,10693
 datachain/error.py,sha256=OWwWMkzZYJrkcoEDGhJHMf7SfKvxcsOLRF94mjPf29I,1609
 datachain/job.py,sha256=x5PB6d5sqx00hePNNkirESlOVAvnmkEM5ygUgQmAhsk,1262
 datachain/listing.py,sha256=aqayl5St3D9PwdwM6nR1STkpLSw-S3U8pudO9PWi3N8,7241
@@ -27,7 +27,7 @@ datachain/catalog/loader.py,sha256=53VnuSRkt_CO9RdlHWkzQsPF55qMxcXvEm3ecsZREw8,6
 datachain/cli/__init__.py,sha256=so3WxEQF03KdGvjav15Sw7a6-lriiE24uDSGbBDBp8o,8298
 datachain/cli/utils.py,sha256=wrLnAh7Wx8O_ojZE8AE4Lxn5WoxHbOj7as8NWlLAA74,3036
 datachain/cli/commands/__init__.py,sha256=zp3bYIioO60x_X04A4-IpZqSYVnpwOa1AdERQaRlIhI,493
-datachain/cli/commands/datasets.py,sha256=Q2zYbiWXYPjg6e_YHyUKaYRg1L6-lxv0L214bogwsUY,6565
+datachain/cli/commands/datasets.py,sha256=DAbONwcA__JM1qkcKVOP5sKukGbCGqLWCMBkBscA3_s,6971
 datachain/cli/commands/du.py,sha256=9edEzDEs98K2VYk8Wf-ZMpUzALcgm9uD6YtoqbvtUGU,391
 datachain/cli/commands/index.py,sha256=eglNaIe1yyIadUHHumjtNbgIjht6kme7SS7xE3YHR88,198
 datachain/cli/commands/ls.py,sha256=CBmk838Q-EQp04lE2Qdnpsc1GXAkC4-I-b-a_828n1E,5272
@@ -104,15 +104,15 @@ datachain/lib/convert/values_to_tuples.py,sha256=j5yZMrVUH6W7b-7yUvdCTGI7JCUAYUO
 datachain/lib/dc/__init__.py,sha256=UrUzmDH6YyVl8fxM5iXTSFtl5DZTUzEYm1MaazK4vdQ,900
 datachain/lib/dc/csv.py,sha256=q6a9BpapGwP6nwy6c5cklxQumep2fUp9l2LAjtTJr6s,4411
 datachain/lib/dc/database.py,sha256=F6EOjPKwSdp26kJsOKGq49D9OxqyKEalINHEwLQav2s,14716
-datachain/lib/dc/datachain.py,sha256=cJ0lbFteO5ync08M1QbriRrSAATOmU-nDkbxSH6SYgA,99462
-datachain/lib/dc/datasets.py,sha256=HKQXnCpIGFsYQ9ociLAUm8cwg2H0GaUmgWCF4FkKpbk,15180
+datachain/lib/dc/datachain.py,sha256=2UtDhtBzx5VejkDE0UTS3t1517jCGr7YEKvO5wqNU-Q,99709
+datachain/lib/dc/datasets.py,sha256=-Bvyyu4XXDXLiWa-bOnsp0Q11RSYXRO0j5DaX8ShaFs,15355
 datachain/lib/dc/hf.py,sha256=AP_MUHg6HJWae10PN9hD_beQVjrl0cleZ6Cvhtl1yoI,2901
 datachain/lib/dc/json.py,sha256=dNijfJ-H92vU3soyR7X1IiDrWhm6yZIGG3bSnZkPdAE,2733
 datachain/lib/dc/listings.py,sha256=V379Cb-7ZyquM0w7sWArQZkzInZy4GB7QQ1ZfowKzQY,4544
 datachain/lib/dc/pandas.py,sha256=ObueUXDUFKJGu380GmazdG02ARpKAHPhSaymfmOH13E,1489
 datachain/lib/dc/parquet.py,sha256=zYcSgrWwyEDW9UxGUSVdIVsCu15IGEf0xL8KfWQqK94,1782
 datachain/lib/dc/records.py,sha256=4N1Fq-j5r4GK-PR5jIO-9B2u_zTNX9l-6SmcRhQDAsw,3136
-datachain/lib/dc/storage.py,sha256=FXroEdxOZfbuEBIWfWTkbGwrI0D4_mrLZSRsIQm0WFE,7693
+datachain/lib/dc/storage.py,sha256=OMJE-9ob9Ku5le8W6O8J1W-XJ0pwHt2PsO-ZCcee1ZA,7950
 datachain/lib/dc/utils.py,sha256=9OMiFu2kXIbtMqzJTEr1qbCoCBGpOmTnkWImVgFTKgo,4112
 datachain/lib/dc/values.py,sha256=7l1n352xWrEdql2NhBcZ3hj8xyPglWiY4qHjFPjn6iw,1428
 datachain/model/__init__.py,sha256=R9faX5OHV1xh2EW-g2MPedwbtEqt3LodJRyluB-QylI,189
@@ -160,9 +160,9 @@ datachain/sql/sqlite/vector.py,sha256=ncW4eu2FlJhrP_CIpsvtkUabZlQdl2D5Lgwy_cbfqR
 datachain/toolkit/__init__.py,sha256=eQ58Q5Yf_Fgv1ZG0IO5dpB4jmP90rk8YxUWmPc1M2Bo,68
 datachain/toolkit/split.py,sha256=ktGWzY4kyzjWyR86dhvzw-Zhl0lVk_LOX3NciTac6qo,2914
 datachain/torch/__init__.py,sha256=gIS74PoEPy4TB3X6vx9nLO0Y3sLJzsA8ckn8pRWihJM,579
-datachain-0.30.5.dist-info/licenses/LICENSE,sha256=8DnqK5yoPI_E50bEg_zsHKZHY2HqPy4rYN338BHQaRA,11344
-datachain-0.30.5.dist-info/METADATA,sha256=90OhCbSbqZn245Dm4-9zgbyBxI6N2NF8jsIdEiw6PUs,13898
-datachain-0.30.5.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-datachain-0.30.5.dist-info/entry_points.txt,sha256=0GMJS6B_KWq0m3VT98vQI2YZodAMkn4uReZ_okga9R4,49
-datachain-0.30.5.dist-info/top_level.txt,sha256=lZPpdU_2jJABLNIg2kvEOBi8PtsYikbN1OdMLHk8bTg,10
-datachain-0.30.5.dist-info/RECORD,,
+datachain-0.30.6.dist-info/licenses/LICENSE,sha256=8DnqK5yoPI_E50bEg_zsHKZHY2HqPy4rYN338BHQaRA,11344
+datachain-0.30.6.dist-info/METADATA,sha256=ZyXo8wdTrN08k--Soy3UHpCu_Jni_6ocO3_PbjCswCE,13898
+datachain-0.30.6.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+datachain-0.30.6.dist-info/entry_points.txt,sha256=0GMJS6B_KWq0m3VT98vQI2YZodAMkn4uReZ_okga9R4,49
+datachain-0.30.6.dist-info/top_level.txt,sha256=lZPpdU_2jJABLNIg2kvEOBi8PtsYikbN1OdMLHk8bTg,10
+datachain-0.30.6.dist-info/RECORD,,
+ datachain-0.30.6.dist-info/RECORD,,