datachain 0.7.8__py3-none-any.whl → 0.7.9__py3-none-any.whl

This diff compares the contents of two package versions as published to a supported public registry. It is provided for informational purposes only.


datachain/cli.py CHANGED
@@ -16,7 +16,7 @@ from tabulate import tabulate
 from datachain import Session, utils
 from datachain.cli_utils import BooleanOptionalAction, CommaSeparatedArgs, KeyValueArgs
 from datachain.config import Config
-from datachain.error import DataChainError
+from datachain.error import DataChainError, DatasetNotFoundError
 from datachain.lib.dc import DataChain
 from datachain.studio import (
     edit_studio_dataset,
@@ -1056,7 +1056,10 @@ def rm_dataset(
     all, local, studio = _determine_flavors(studio, local, all, token)
 
     if all or local:
-        catalog.remove_dataset(name, version=version, force=force)
+        try:
+            catalog.remove_dataset(name, version=version, force=force)
+        except DatasetNotFoundError:
+            print("Dataset not found in local", file=sys.stderr)
 
     if (all or studio) and token:
         remove_studio_dataset(team, name, version, force)
@@ -1077,7 +1080,10 @@ def edit_dataset(
     all, local, studio = _determine_flavors(studio, local, all, token)
 
     if all or local:
-        catalog.edit_dataset(name, new_name, description, labels)
+        try:
+            catalog.edit_dataset(name, new_name, description, labels)
+        except DatasetNotFoundError:
+            print("Dataset not found in local", file=sys.stderr)
 
     if (all or studio) and token:
         edit_studio_dataset(team, name, new_name, description, labels)
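
Taken in isolation, the new error handling means a dataset that exists only in Studio no longer aborts the command when the local operation runs first. A minimal sketch of the pattern, assuming a catalog object (the helper name is hypothetical, not the full CLI wiring):

import sys

from datachain.error import DatasetNotFoundError

def remove_local_then_studio(catalog, name, version=None, force=False):
    # Local removal is now best-effort: a missing local dataset prints a
    # warning instead of raising, so a following Studio removal still runs.
    try:
        catalog.remove_dataset(name, version=version, force=force)
    except DatasetNotFoundError:
        print("Dataset not found in local", file=sys.stderr)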
datachain/data_storage/metastore.py CHANGED
@@ -725,9 +725,10 @@ class AbstractDBMetastore(AbstractMetastore):
 
     def list_datasets(self) -> Iterator["DatasetListRecord"]:
         """Lists all datasets."""
-        yield from self._parse_dataset_list(
-            self.db.execute(self._base_list_datasets_query())
+        query = self._base_list_datasets_query().order_by(
+            self._datasets.c.name, self._datasets_versions.c.version
         )
+        yield from self._parse_dataset_list(self.db.execute(query))
 
     def list_datasets_by_prefix(
         self, prefix: str, conn=None
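
The ordering change makes list_datasets deterministic. In plain SQLAlchemy terms (the table and columns below are illustrative stand-ins, not the package's real schema objects):

import sqlalchemy as sa

metadata = sa.MetaData()
datasets = sa.Table(
    "datasets",
    metadata,
    sa.Column("name", sa.Text),
    sa.Column("version", sa.Integer),
)

# Appending order_by to the base select pins a stable name/version order
# instead of leaving the row order up to the database engine.
query = sa.select(datasets).order_by(datasets.c.name, datasets.c.version)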
datachain/lib/dc.py CHANGED
@@ -1446,6 +1446,7 @@ class DataChain:
             tokenizer=tokenizer,
             tokenizer_kwargs=tokenizer_kwargs,
             num_samples=num_samples,
+            dc_settings=chain._settings,
         )
 
     def remove_file_signals(self) -> "Self":  # noqa: D102
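
The extra argument forwards the chain's settings into the PyTorch dataset, so cache and prefetch choices made on the chain survive into training. A hedged usage sketch ("my-dataset" is a placeholder name):

from datachain.lib.dc import DataChain

chain = DataChain.from_dataset(name="my-dataset").settings(
    cache=True,  # keep downloaded files in the local cache
    prefetch=8,  # overrides PytorchDataset's class-level default of 2
)
pt_dataset = chain.to_pytorch()  # dc_settings=chain._settings rides along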
datachain/lib/pytorch.py CHANGED
@@ -10,8 +10,10 @@ from torchvision.transforms import v2
 from tqdm import tqdm
 
 from datachain import Session
+from datachain.asyn import AsyncMapper
 from datachain.catalog import Catalog, get_catalog
 from datachain.lib.dc import DataChain
+from datachain.lib.settings import Settings
 from datachain.lib.text import convert_text
 
 if TYPE_CHECKING:
@@ -30,6 +32,8 @@ def label_to_int(value: str, classes: list) -> int:
 
 
 class PytorchDataset(IterableDataset):
+    prefetch: int = 2
+
     def __init__(
         self,
         name: str,
@@ -39,6 +43,7 @@ class PytorchDataset(IterableDataset):
         tokenizer: Optional[Callable] = None,
         tokenizer_kwargs: Optional[dict[str, Any]] = None,
         num_samples: int = 0,
+        dc_settings: Optional[Settings] = None,
     ):
         """
         Pytorch IterableDataset that streams DataChain datasets.
@@ -66,6 +71,11 @@
             catalog = get_catalog()
         self._init_catalog(catalog)
 
+        dc_settings = dc_settings or Settings()
+        self.cache = dc_settings.cache
+        if (prefetch := dc_settings.prefetch) is not None:
+            self.prefetch = prefetch
+
     def _init_catalog(self, catalog: "Catalog"):
         # For compatibility with multiprocessing,
         # we can only store params in __init__(), as Catalog isn't picklable
@@ -82,51 +92,58 @@ class PytorchDataset(IterableDataset):
         wh = wh_cls(*wh_args, **wh_kwargs)
         return Catalog(ms, wh, **self._catalog_params)
 
-    def __iter__(self) -> Iterator[Any]:
-        if self.catalog is None:
-            self.catalog = self._get_catalog()
-        session = Session.get(catalog=self.catalog)
-        total_rank, total_workers = self.get_rank_and_workers()
+    def _rows_iter(self, total_rank: int, total_workers: int):
+        catalog = self._get_catalog()
+        session = Session("PyTorch", catalog=catalog)
         ds = DataChain.from_dataset(
             name=self.name, version=self.version, session=session
-        )
+        ).settings(cache=self.cache, prefetch=self.prefetch)
         ds = ds.remove_file_signals()
 
         if self.num_samples > 0:
             ds = ds.sample(self.num_samples)
         ds = ds.chunk(total_rank, total_workers)
+        yield from ds.collect()
+
+    def __iter__(self) -> Iterator[Any]:
+        total_rank, total_workers = self.get_rank_and_workers()
+        rows = self._rows_iter(total_rank, total_workers)
+        if self.prefetch > 0:
+            from datachain.lib.udf import _prefetch_input
+
+            rows = AsyncMapper(_prefetch_input, rows, workers=self.prefetch).iterate()
+
         desc = f"Parsed PyTorch dataset for rank={total_rank} worker"
-        with tqdm(desc=desc, unit=" rows") as pbar:
-            for row_features in ds.collect():
-                row = []
-                for fr in row_features:
-                    if hasattr(fr, "read"):
-                        row.append(fr.read())  # type: ignore[unreachable]
-                    else:
-                        row.append(fr)
-                # Apply transforms
-                if self.transform:
-                    try:
-                        if isinstance(self.transform, v2.Transform):
-                            row = self.transform(row)
-                        for i, val in enumerate(row):
-                            if isinstance(val, Image.Image):
-                                row[i] = self.transform(val)
-                    except ValueError:
-                        logger.warning(
-                            "Skipping transform due to unsupported data types."
-                        )
-                        self.transform = None
-                if self.tokenizer:
-                    for i, val in enumerate(row):
-                        if isinstance(val, str) or (
-                            isinstance(val, list) and isinstance(val[0], str)
-                        ):
-                            row[i] = convert_text(
-                                val, self.tokenizer, self.tokenizer_kwargs
-                            ).squeeze(0)  # type: ignore[union-attr]
-                yield row
-                pbar.update(1)
+        with tqdm(rows, desc=desc, unit=" rows", position=total_rank) as rows_it:
+            yield from map(self._process_row, rows_it)
+
+    def _process_row(self, row_features):
+        row = []
+        for fr in row_features:
+            if hasattr(fr, "read"):
+                row.append(fr.read())  # type: ignore[unreachable]
+            else:
+                row.append(fr)
+        # Apply transforms
+        if self.transform:
+            try:
+                if isinstance(self.transform, v2.Transform):
+                    row = self.transform(row)
+                for i, val in enumerate(row):
+                    if isinstance(val, Image.Image):
+                        row[i] = self.transform(val)
+            except ValueError:
+                logger.warning("Skipping transform due to unsupported data types.")
+                self.transform = None
+        if self.tokenizer:
+            for i, val in enumerate(row):
+                if isinstance(val, str) or (
+                    isinstance(val, list) and isinstance(val[0], str)
+                ):
+                    row[i] = convert_text(
+                        val, self.tokenizer, self.tokenizer_kwargs
                    ).squeeze(0)  # type: ignore[union-attr]
+        return row
 
     @staticmethod
     def get_rank_and_workers() -> tuple[int, int]:
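
The prefetch path above overlaps file downloads with consumption: AsyncMapper runs the internal _prefetch_input helper over the row iterator with a small worker pool. Reduced to its essentials, relying only on the calls visible in the diff (_prefetch_input is a private helper, so this is a sketch rather than supported API):

from datachain.asyn import AsyncMapper
from datachain.lib.udf import _prefetch_input

def iter_with_prefetch(rows, prefetch=2):
    # With prefetch > 0, up to `prefetch` rows have their files fetched
    # ahead of the consumer; prefetch=0 degrades to plain iteration.
    if prefetch > 0:
        rows = AsyncMapper(_prefetch_input, rows, workers=prefetch).iterate()
    yield from rows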
datachain/remote/studio.py CHANGED
@@ -119,18 +119,27 @@ class StudioClient:
                 "\tpip install 'datachain[remote]'"
             ) from None
 
-    def _send_request_msgpack(self, route: str, data: dict[str, Any]) -> Response[Any]:
+    def _send_request_msgpack(
+        self, route: str, data: dict[str, Any], method: Optional[str] = "POST"
+    ) -> Response[Any]:
         import msgpack
         import requests
 
-        response = requests.post(
-            f"{self.url}/{route}",
-            json={**data, "team_name": self.team},
+        kwargs = (
+            {"params": {**data, "team_name": self.team}}
+            if method == "GET"
+            else {"json": {**data, "team_name": self.team}}
+        )
+
+        response = requests.request(
+            method=method,  # type: ignore[arg-type]
+            url=f"{self.url}/{route}",
             headers={
                 "Content-Type": "application/json",
                 "Authorization": f"token {self.token}",
             },
             timeout=self.timeout,
+            **kwargs,  # type: ignore[arg-type]
         )
         ok = response.ok
         if not ok:
@@ -148,7 +157,9 @@ class StudioClient:
         return Response(response_data, ok, message)
 
     @retry_with_backoff(retries=5)
-    def _send_request(self, route: str, data: dict[str, Any]) -> Response[Any]:
+    def _send_request(
+        self, route: str, data: dict[str, Any], method: Optional[str] = "POST"
+    ) -> Response[Any]:
         """
         Function that communicate Studio API.
         It will raise an exception, and try to retry, if 5xx status code is
@@ -157,14 +168,21 @@ class StudioClient:
         """
         import requests
 
-        response = requests.post(
-            f"{self.url}/{route}",
-            json={**data, "team_name": self.team},
+        kwargs = (
+            {"params": {**data, "team_name": self.team}}
+            if method == "GET"
+            else {"json": {**data, "team_name": self.team}}
+        )
+
+        response = requests.request(
+            method=method,  # type: ignore[arg-type]
+            url=f"{self.url}/{route}",
             headers={
                 "Content-Type": "application/json",
                 "Authorization": f"token {self.token}",
             },
             timeout=self.timeout,
+            **kwargs,  # type: ignore[arg-type]
         )
         try:
             response.raise_for_status()
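
Both request helpers now share the same dispatch: GET requests move the payload into query parameters, while other methods keep the previous JSON body. The mechanism in isolation (url and token are placeholders):

import requests

def send(url, token, data, method="POST", timeout=30.0):
    # GET has no body, so the payload becomes query parameters; POST and
    # DELETE keep the original JSON-body behavior.
    kwargs = {"params": data} if method == "GET" else {"json": data}
    return requests.request(
        method=method,
        url=url,
        headers={"Authorization": f"token {token}"},
        timeout=timeout,
        **kwargs,
    )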
@@ -222,7 +240,7 @@ class StudioClient:
             yield path, response
 
     def ls_datasets(self) -> Response[LsData]:
-        return self._send_request("datachain/ls-datasets", {})
+        return self._send_request("datachain/datasets", {}, method="GET")
 
     def edit_dataset(
         self,
@@ -232,20 +250,14 @@ class StudioClient:
         labels: Optional[list[str]] = None,
     ) -> Response[DatasetInfoData]:
         body = {
+            "new_name": new_name,
             "dataset_name": name,
+            "description": description,
+            "labels": labels,
         }
 
-        if new_name is not None:
-            body["new_name"] = new_name
-
-        if description is not None:
-            body["description"] = description
-
-        if labels is not None:
-            body["labels"] = labels  # type: ignore[assignment]
-
         return self._send_request(
-            "datachain/edit-dataset",
+            "datachain/datasets",
             body,
         )
@@ -256,12 +268,13 @@ class StudioClient:
         force: Optional[bool] = False,
     ) -> Response[DatasetInfoData]:
         return self._send_request(
-            "datachain/rm-dataset",
+            "datachain/datasets",
             {
                 "dataset_name": name,
                 "version": version,
                 "force": force,
             },
+            method="DELETE",
         )
 
     def dataset_info(self, name: str) -> Response[DatasetInfoData]:
@@ -272,7 +285,9 @@ class StudioClient:
 
             return dataset_info
 
-        response = self._send_request("datachain/dataset-info", {"dataset_name": name})
+        response = self._send_request(
+            "datachain/datasets/info", {"dataset_name": name}, method="GET"
+        )
         if response.ok:
             response.data = _parse_dataset_info(response.data)
         return response
@@ -282,14 +297,16 @@ class StudioClient:
     ) -> Response[DatasetRowsData]:
         req_data = {"dataset_name": name, "dataset_version": version}
         return self._send_request_msgpack(
-            "datachain/dataset-rows",
+            "datachain/datasets/rows",
             {**req_data, "offset": offset, "limit": DATASET_ROWS_CHUNK_SIZE},
+            method="GET",
         )
 
     def dataset_stats(self, name: str, version: int) -> Response[DatasetStatsData]:
         response = self._send_request(
-            "datachain/dataset-stats",
+            "datachain/datasets/stats",
             {"dataset_name": name, "dataset_version": version},
+            method="GET",
         )
         if response.ok:
             response.data = DatasetStats(**response.data)
@@ -299,16 +316,18 @@ class StudioClient:
         self, name: str, version: int
     ) -> Response[DatasetExportSignedUrls]:
         return self._send_request(
-            "datachain/dataset-export",
+            "datachain/datasets/export",
             {"dataset_name": name, "dataset_version": version},
+            method="GET",
         )
 
     def dataset_export_status(
         self, name: str, version: int
     ) -> Response[DatasetExportStatus]:
         return self._send_request(
-            "datachain/dataset-export-status",
+            "datachain/datasets/export-status",
             {"dataset_name": name, "dataset_version": version},
+            method="GET",
         )
 
     def upload_file(self, file_name: str, content: bytes) -> Response[FileUploadData]:
datachain/studio.py CHANGED
@@ -155,7 +155,7 @@ def edit_studio_dataset(
     if not response.ok:
         raise_remote_error(response.message)
 
-    print(f"Dataset {name} updated")
+    print(f"Dataset '{name}' updated in Studio")
 
 
 def remove_studio_dataset(
@@ -169,7 +169,7 @@ def remove_studio_dataset(
     if not response.ok:
         raise_remote_error(response.message)
 
-    print(f"Dataset {name} removed")
+    print(f"Dataset '{name}' removed from Studio")
 
 
 def save_config(hostname, token):
{datachain-0.7.8.dist-info → datachain-0.7.9.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: datachain
-Version: 0.7.8
+Version: 0.7.9
 Summary: Wrangle unstructured AI data at scale
 Author-email: Dmitry Petrov <support@dvc.org>
 License: Apache-2.0
@@ -98,7 +98,7 @@ Requires-Dist: unstructured[embed-huggingface,pdf]<0.16.0; extra == "examples"
 Requires-Dist: pdfplumber==0.11.4; extra == "examples"
 Requires-Dist: huggingface_hub[hf_transfer]; extra == "examples"
 Requires-Dist: onnx==1.16.1; extra == "examples"
-Requires-Dist: ultralytics==8.3.29; extra == "examples"
+Requires-Dist: ultralytics==8.3.37; extra == "examples"
 
 ================
 |logo| DataChain
{datachain-0.7.8.dist-info → datachain-0.7.9.dist-info}/RECORD RENAMED
@@ -2,7 +2,7 @@ datachain/__init__.py,sha256=ofPJ6B-d-ybSDRrE7J6wqF_ZRAB2W9U8l-eeuBtqPLg,865
 datachain/__main__.py,sha256=hG3Y4ARGEqe1AWwNMd259rBlqtphx1Wk39YbueQ0yV8,91
 datachain/asyn.py,sha256=5aKrjnUxk0mtnZeFKNJd1DCE0MsnSoyJBZkr0y9H_a0,9313
 datachain/cache.py,sha256=s0YHN7qurmQv-eC265TjeureK84TebWWAnL07cxchZQ,2997
-datachain/cli.py,sha256=Ysm-6Kb-54FfkN35VJIe5vW7Kik8VGA3wcyCUnqPBHg,42245
+datachain/cli.py,sha256=wQiYQ_qSVCGvS06pkknT9_FIBdFRzBdeRusW9uXE3vQ,42505
 datachain/cli_utils.py,sha256=jrn9ejGXjybeO1ur3fjdSiAyCHZrX0qsLLbJzN9ErPM,2418
 datachain/config.py,sha256=g8qbNV0vW2VEKpX-dGZ9pAn0DAz6G2ZFcr7SAV3PoSM,4272
 datachain/dataset.py,sha256=P-pDBgvPqJGDhq_I7fwCfb6hY8E8mIAO8Q0NT7SNlNE,19128
@@ -14,7 +14,7 @@ datachain/nodes_fetcher.py,sha256=ILMzUW5o4_6lUOVrLDC9gJPCXfcgKnMG68plrc7dAOA,11
 datachain/nodes_thread_pool.py,sha256=uPo-xl8zG5m9YgODjPFBpbcqqHjI-dcxH87yAbj_qco,3192
 datachain/progress.py,sha256=5KotcvvzAUL_RF0GEj4JY0IB1lyImnmHxe89YkT1XO4,4330
 datachain/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-datachain/studio.py,sha256=MthVADn-jM2I5TlESOfbzFnKGZjpuk9bM8m2vqOK-C8,7227
+datachain/studio.py,sha256=Hr0Ha0kou0so4i8i-gWiXC1AYlJ2arI1D55cc7mi3tg,7253
 datachain/telemetry.py,sha256=0A4IOPPp9VlP5pyW9eBfaTK3YhHGzHl7dQudQjUAx9A,994
 datachain/utils.py,sha256=-mSFowjIidJ4_sMXInvNHLn4rK_QnHuIlLuH1_lMGmI,13897
 datachain/catalog/__init__.py,sha256=g2iAAFx_gEIrqshXlhSEbrc8qDaEH11cjU40n3CHDz4,409
@@ -32,7 +32,7 @@ datachain/client/s3.py,sha256=CVHBUZ1Ic2Q3370nl-Bbe69phuWjFlrVv9dTJKBpRT0,6019
 datachain/data_storage/__init__.py,sha256=9Wit-oe5P46V7CJQTD0BJ5MhOa2Y9h3ddJ4VWTe-Lec,273
 datachain/data_storage/db_engine.py,sha256=81Ol1of9TTTzD97ORajCnP366Xz2mEJt6C-kTUCaru4,3406
 datachain/data_storage/job.py,sha256=w-7spowjkOa1P5fUVtJou3OltT0L48P0RYWZ9rSJ9-s,383
-datachain/data_storage/metastore.py,sha256=EzSsfR_l_84i1AewYygpdsJyzGqEmvXjpeohlYF7h4A,37435
+datachain/data_storage/metastore.py,sha256=hfTITcesE9XlUTxcCcdDyWGGep-QSjJL9DUxko5QCeI,37524
 datachain/data_storage/schema.py,sha256=-QVlRvD0dfu-ZFUxylEoSnLJLnleMEjVlcAb2OGu-AY,9895
 datachain/data_storage/serializer.py,sha256=6G2YtOFqqDzJf1KbvZraKGXl2XHZyVml2krunWUum5o,927
 datachain/data_storage/sqlite.py,sha256=D_ZQ0PHmZzHO2dinv4naVJocUDIZUwV4WAz692C1cyk,22521
@@ -53,7 +53,7 @@ datachain/lib/arrow.py,sha256=b5efxAUaNNYVwtXVJqj07D3zf5KC-BPlLCxKEZbEG6w,9429
 datachain/lib/clip.py,sha256=lm5CzVi4Cj1jVLEKvERKArb-egb9j1Ls-fwTItT6vlI,6150
 datachain/lib/data_model.py,sha256=zS4lmXHVBXc9ntcyea2a1CRLXGSAN_0glXcF88CohgY,2685
 datachain/lib/dataset_info.py,sha256=IjdF1E0TQNOq9YyynfWiCFTeZpbyGfyJvxgJY4YN810,2493
-datachain/lib/dc.py,sha256=t5y5tsYyU7uuk3gEPPhhcDSZ1tL1aHkKG2W54eHiUq8,89492
+datachain/lib/dc.py,sha256=xqLR4IH_mbuet0FsxBHDsRUg-zR6tO8UZdLQQTLG8EE,89533
 datachain/lib/file.py,sha256=-XMkL6ED1sE7TMhWoMRTEuOXswZJw8X6AEmJDONFP74,15019
 datachain/lib/hf.py,sha256=a-zFpDmZIR4r8dlNNTjfpAKSnuJ9xyRXlgcdENiXt3E,5864
 datachain/lib/image.py,sha256=AMXYwQsmarZjRbPCZY3M1jDsM2WAB_b3cTY4uOIuXNU,2675
@@ -61,7 +61,7 @@ datachain/lib/listing.py,sha256=cVkCp7TRVpcZKSx-Bbk9t51bQI9Mw0o86W6ZPhAsuzM,3667
 datachain/lib/listing_info.py,sha256=9ua40Hw0aiQByUw3oAEeNzMavJYfW0Uhe8YdCTK-m_g,1110
 datachain/lib/meta_formats.py,sha256=anK2bDVbaeCCh0yvKUBaW2MVos3zRgdaSV8uSduzPcU,6680
 datachain/lib/model_store.py,sha256=DNIv8Y6Jtk1_idNLzIpsThOsdW2BMAudyUCbPUcgcxk,2515
-datachain/lib/pytorch.py,sha256=Nh6fUbQMLX8OpZvX4tw4bJjTCQpRKi0jSLgkJnLHdTM,5880
+datachain/lib/pytorch.py,sha256=QMJO_OGEMvBi2x71vGcG25agLzNwyLmF4Qx5iILlwaM,6350
 datachain/lib/settings.py,sha256=ZELRCTLbi5vzRPiDX6cQ9LLg9TefJ_A05gIGni0lll8,2535
 datachain/lib/signal_schema.py,sha256=_uh19nCKhiD9ua8oIN1Q8R9iYv1BZAuqTJCLYVmyW8k,24557
 datachain/lib/tar.py,sha256=3WIzao6yD5fbLqXLTt9GhPGNonbFIs_fDRu-9vgLgsA,1038
@@ -96,7 +96,7 @@ datachain/query/queue.py,sha256=waqM_KzavU8C-G95-4211Nd4GXna_u2747Chgwtgz2w,3839
 datachain/query/schema.py,sha256=b_KnVy6B26Ol4nYG0LqNNpeQ1QYPk95YRGUjXfdaQWs,6606
 datachain/query/session.py,sha256=vvLIJ5b8eElovHLAWq_CZJXmN5t7C7iAZA7x9wPPOms,5905
 datachain/remote/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-datachain/remote/studio.py,sha256=jp6NWo7OPUxqO8uYEHP0_XFlmj47rMxC80qKQ7rA3Xk,11024
+datachain/remote/studio.py,sha256=WiK6fpRAw0a6Dth4XXI0YInEHH4gDU7AUHHDNd3wJzg,11616
 datachain/sql/__init__.py,sha256=6SQRdbljO3d2hx3EAVXEZrHQKv5jth0Jh98PogT59No,262
 datachain/sql/selectable.py,sha256=cTc60qVoAwqqss0Vop8Lt5Z-ROnM1XrQmL_GLjRxhXs,1765
 datachain/sql/types.py,sha256=ASSPkmM5EzdRindqj2O7WHLXq8VHAgFYedG8lYfGvVI,14045
@@ -118,9 +118,9 @@ datachain/sql/sqlite/vector.py,sha256=ncW4eu2FlJhrP_CIpsvtkUabZlQdl2D5Lgwy_cbfqR
 datachain/toolkit/__init__.py,sha256=eQ58Q5Yf_Fgv1ZG0IO5dpB4jmP90rk8YxUWmPc1M2Bo,68
 datachain/toolkit/split.py,sha256=ZgDcrNiKiPXZmKD591_1z9qRIXitu5zwAsoVPB7ykiU,2508
 datachain/torch/__init__.py,sha256=gIS74PoEPy4TB3X6vx9nLO0Y3sLJzsA8ckn8pRWihJM,579
-datachain-0.7.8.dist-info/LICENSE,sha256=8DnqK5yoPI_E50bEg_zsHKZHY2HqPy4rYN338BHQaRA,11344
-datachain-0.7.8.dist-info/METADATA,sha256=r8znUWHdmY5y6hk8N9NFdlrKaHKkteeji7NXJTb2Ges,18006
-datachain-0.7.8.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
-datachain-0.7.8.dist-info/entry_points.txt,sha256=0GMJS6B_KWq0m3VT98vQI2YZodAMkn4uReZ_okga9R4,49
-datachain-0.7.8.dist-info/top_level.txt,sha256=lZPpdU_2jJABLNIg2kvEOBi8PtsYikbN1OdMLHk8bTg,10
-datachain-0.7.8.dist-info/RECORD,,
+datachain-0.7.9.dist-info/LICENSE,sha256=8DnqK5yoPI_E50bEg_zsHKZHY2HqPy4rYN338BHQaRA,11344
+datachain-0.7.9.dist-info/METADATA,sha256=iu58cwfGQVYTwn53symALXVpe9292EWXdOly2MWuPZY,18006
+datachain-0.7.9.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
+datachain-0.7.9.dist-info/entry_points.txt,sha256=0GMJS6B_KWq0m3VT98vQI2YZodAMkn4uReZ_okga9R4,49
+datachain-0.7.9.dist-info/top_level.txt,sha256=lZPpdU_2jJABLNIg2kvEOBi8PtsYikbN1OdMLHk8bTg,10
+datachain-0.7.9.dist-info/RECORD,,