datachain 0.7.8__py3-none-any.whl → 0.7.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of datachain was flagged as possibly problematic.
- datachain/cli.py +9 -3
- datachain/data_storage/metastore.py +3 -2
- datachain/lib/dc.py +1 -0
- datachain/lib/pytorch.py +54 -37
- datachain/remote/studio.py +44 -25
- datachain/studio.py +2 -2
- {datachain-0.7.8.dist-info → datachain-0.7.9.dist-info}/METADATA +2 -2
- {datachain-0.7.8.dist-info → datachain-0.7.9.dist-info}/RECORD +12 -12
- {datachain-0.7.8.dist-info → datachain-0.7.9.dist-info}/LICENSE +0 -0
- {datachain-0.7.8.dist-info → datachain-0.7.9.dist-info}/WHEEL +0 -0
- {datachain-0.7.8.dist-info → datachain-0.7.9.dist-info}/entry_points.txt +0 -0
- {datachain-0.7.8.dist-info → datachain-0.7.9.dist-info}/top_level.txt +0 -0
datachain/cli.py
CHANGED
@@ -16,7 +16,7 @@ from tabulate import tabulate
 from datachain import Session, utils
 from datachain.cli_utils import BooleanOptionalAction, CommaSeparatedArgs, KeyValueArgs
 from datachain.config import Config
-from datachain.error import DataChainError
+from datachain.error import DataChainError, DatasetNotFoundError
 from datachain.lib.dc import DataChain
 from datachain.studio import (
     edit_studio_dataset,
@@ -1056,7 +1056,10 @@ def rm_dataset(
     all, local, studio = _determine_flavors(studio, local, all, token)

     if all or local:
-        catalog.remove_dataset(name, version=version, force=force)
+        try:
+            catalog.remove_dataset(name, version=version, force=force)
+        except DatasetNotFoundError:
+            print("Dataset not found in local", file=sys.stderr)

     if (all or studio) and token:
         remove_studio_dataset(team, name, version, force)
@@ -1077,7 +1080,10 @@ def edit_dataset(
     all, local, studio = _determine_flavors(studio, local, all, token)

     if all or local:
-        catalog.edit_dataset(name, new_name, description, labels)
+        try:
+            catalog.edit_dataset(name, new_name, description, labels)
+        except DatasetNotFoundError:
+            print("Dataset not found in local", file=sys.stderr)

     if (all or studio) and token:
         edit_studio_dataset(team, name, new_name, description, labels)
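The practical effect: removing or editing a dataset that exists only in Studio no longer aborts the CLI with a traceback; the local miss goes to stderr and the Studio call still runs. A minimal sketch of the same pattern outside the CLI (the dataset name is a placeholder; `get_catalog` and the `remove_dataset` signature are taken from the diffs on this page):

import sys

from datachain.catalog import get_catalog
from datachain.error import DatasetNotFoundError

catalog = get_catalog()
try:
    # Same call the CLI now wraps; "my-dataset" is a hypothetical name.
    catalog.remove_dataset("my-dataset", version=None, force=False)
except DatasetNotFoundError:
    # Mirrors the CLI behavior: report on stderr and keep going
    # (e.g. on to the Studio-side removal).
    print("Dataset not found in local", file=sys.stderr)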
datachain/data_storage/metastore.py
CHANGED

@@ -725,9 +725,10 @@ class AbstractDBMetastore(AbstractMetastore):

     def list_datasets(self) -> Iterator["DatasetListRecord"]:
         """Lists all datasets."""
-        yield from self._parse_dataset_list(
-            self.db.execute(self._base_list_datasets_query())
+        query = self._base_list_datasets_query().order_by(
+            self._datasets.c.name, self._datasets_versions.c.version
         )
+        yield from self._parse_dataset_list(self.db.execute(query))

     def list_datasets_by_prefix(
         self, prefix: str, conn=None
datachain/lib/dc.py
CHANGED
datachain/lib/pytorch.py
CHANGED
@@ -10,8 +10,10 @@ from torchvision.transforms import v2
 from tqdm import tqdm

 from datachain import Session
+from datachain.asyn import AsyncMapper
 from datachain.catalog import Catalog, get_catalog
 from datachain.lib.dc import DataChain
+from datachain.lib.settings import Settings
 from datachain.lib.text import convert_text

 if TYPE_CHECKING:
@@ -30,6 +32,8 @@ def label_to_int(value: str, classes: list) -> int:


 class PytorchDataset(IterableDataset):
+    prefetch: int = 2
+
     def __init__(
         self,
         name: str,
@@ -39,6 +43,7 @@ class PytorchDataset(IterableDataset):
         tokenizer: Optional[Callable] = None,
         tokenizer_kwargs: Optional[dict[str, Any]] = None,
         num_samples: int = 0,
+        dc_settings: Optional[Settings] = None,
     ):
         """
         Pytorch IterableDataset that streams DataChain datasets.
@@ -66,6 +71,11 @@ class PytorchDataset(IterableDataset):
         catalog = get_catalog()
         self._init_catalog(catalog)

+        dc_settings = dc_settings or Settings()
+        self.cache = dc_settings.cache
+        if (prefetch := dc_settings.prefetch) is not None:
+            self.prefetch = prefetch
+
     def _init_catalog(self, catalog: "Catalog"):
         # For compatibility with multiprocessing,
         # we can only store params in __init__(), as Catalog isn't picklable
@@ -82,51 +92,58 @@ class PytorchDataset(IterableDataset):
         wh = wh_cls(*wh_args, **wh_kwargs)
         return Catalog(ms, wh, **self._catalog_params)

-    def __iter__(self) -> Iterator[Any]:
-        …
-        session = Session.get(catalog=self.catalog)
-        total_rank, total_workers = self.get_rank_and_workers()
+    def _rows_iter(self, total_rank: int, total_workers: int):
+        catalog = self._get_catalog()
+        session = Session("PyTorch", catalog=catalog)
         ds = DataChain.from_dataset(
             name=self.name, version=self.version, session=session
-        )
+        ).settings(cache=self.cache, prefetch=self.prefetch)
         ds = ds.remove_file_signals()

         if self.num_samples > 0:
             ds = ds.sample(self.num_samples)
         ds = ds.chunk(total_rank, total_workers)
+        yield from ds.collect()
+
+    def __iter__(self) -> Iterator[Any]:
+        total_rank, total_workers = self.get_rank_and_workers()
+        rows = self._rows_iter(total_rank, total_workers)
+        if self.prefetch > 0:
+            from datachain.lib.udf import _prefetch_input
+
+            rows = AsyncMapper(_prefetch_input, rows, workers=self.prefetch).iterate()
+
         desc = f"Parsed PyTorch dataset for rank={total_rank} worker"
-        with tqdm(desc=desc, unit=" rows") as pbar:
-            …
-            pbar.update(1)
+        with tqdm(rows, desc=desc, unit=" rows", position=total_rank) as rows_it:
+            yield from map(self._process_row, rows_it)
+
+    def _process_row(self, row_features):
+        row = []
+        for fr in row_features:
+            if hasattr(fr, "read"):
+                row.append(fr.read())  # type: ignore[unreachable]
+            else:
+                row.append(fr)
+        # Apply transforms
+        if self.transform:
+            try:
+                if isinstance(self.transform, v2.Transform):
+                    row = self.transform(row)
+                for i, val in enumerate(row):
+                    if isinstance(val, Image.Image):
+                        row[i] = self.transform(val)
+            except ValueError:
+                logger.warning("Skipping transform due to unsupported data types.")
+                self.transform = None
+        if self.tokenizer:
+            for i, val in enumerate(row):
+                if isinstance(val, str) or (
+                    isinstance(val, list) and isinstance(val[0], str)
+                ):
+                    row[i] = convert_text(
+                        val, self.tokenizer, self.tokenizer_kwargs
+                    ).squeeze(0)  # type: ignore[union-attr]
+        return row

     @staticmethod
     def get_rank_and_workers() -> tuple[int, int]:
datachain/remote/studio.py
CHANGED
@@ -119,18 +119,27 @@ class StudioClient:
             "\tpip install 'datachain[remote]'"
         ) from None

-    def _send_request_msgpack(…
+    def _send_request_msgpack(
+        self, route: str, data: dict[str, Any], method: Optional[str] = "POST"
+    ) -> Response[Any]:
         import msgpack
         import requests

-        …
-        …
-        …
+        kwargs = (
+            {"params": {**data, "team_name": self.team}}
+            if method == "GET"
+            else {"json": {**data, "team_name": self.team}}
+        )
+
+        response = requests.request(
+            method=method,  # type: ignore[arg-type]
+            url=f"{self.url}/{route}",
             headers={
                 "Content-Type": "application/json",
                 "Authorization": f"token {self.token}",
             },
             timeout=self.timeout,
+            **kwargs,  # type: ignore[arg-type]
         )
         ok = response.ok
         if not ok:
@@ -148,7 +157,9 @@ class StudioClient:
         return Response(response_data, ok, message)

     @retry_with_backoff(retries=5)
-    def _send_request(…
+    def _send_request(
+        self, route: str, data: dict[str, Any], method: Optional[str] = "POST"
+    ) -> Response[Any]:
         """
         Function that communicate Studio API.
         It will raise an exception, and try to retry, if 5xx status code is
@@ -157,14 +168,21 @@ class StudioClient:
         """
         import requests

-        …
-        …
-        …
+        kwargs = (
+            {"params": {**data, "team_name": self.team}}
+            if method == "GET"
+            else {"json": {**data, "team_name": self.team}}
+        )
+
+        response = requests.request(
+            method=method,  # type: ignore[arg-type]
+            url=f"{self.url}/{route}",
             headers={
                 "Content-Type": "application/json",
                 "Authorization": f"token {self.token}",
             },
             timeout=self.timeout,
+            **kwargs,  # type: ignore[arg-type]
         )
         try:
             response.raise_for_status()
@@ -222,7 +240,7 @@ class StudioClient:
             yield path, response

     def ls_datasets(self) -> Response[LsData]:
-        return self._send_request("datachain/…
+        return self._send_request("datachain/datasets", {}, method="GET")

     def edit_dataset(
         self,
@@ -232,20 +250,14 @@ class StudioClient:
         labels: Optional[list[str]] = None,
     ) -> Response[DatasetInfoData]:
         body = {
+            "new_name": new_name,
             "dataset_name": name,
+            "description": description,
+            "labels": labels,
         }

-        if new_name is not None:
-            body["new_name"] = new_name
-
-        if description is not None:
-            body["description"] = description
-
-        if labels is not None:
-            body["labels"] = labels  # type: ignore[assignment]
-
         return self._send_request(
-            "datachain/…
+            "datachain/datasets",
             body,
         )
@@ -256,12 +268,13 @@ class StudioClient:
         force: Optional[bool] = False,
     ) -> Response[DatasetInfoData]:
         return self._send_request(
-            "datachain/…
+            "datachain/datasets",
             {
                 "dataset_name": name,
                 "version": version,
                 "force": force,
             },
+            method="DELETE",
         )

     def dataset_info(self, name: str) -> Response[DatasetInfoData]:
@@ -272,7 +285,9 @@ class StudioClient:

             return dataset_info

-        response = self._send_request(…
+        response = self._send_request(
+            "datachain/datasets/info", {"dataset_name": name}, method="GET"
+        )
         if response.ok:
             response.data = _parse_dataset_info(response.data)
         return response
@@ -282,14 +297,16 @@ class StudioClient:
     ) -> Response[DatasetRowsData]:
         req_data = {"dataset_name": name, "dataset_version": version}
         return self._send_request_msgpack(
-            "datachain/…
+            "datachain/datasets/rows",
             {**req_data, "offset": offset, "limit": DATASET_ROWS_CHUNK_SIZE},
+            method="GET",
         )

     def dataset_stats(self, name: str, version: int) -> Response[DatasetStatsData]:
         response = self._send_request(
-            "datachain/…
+            "datachain/datasets/stats",
             {"dataset_name": name, "dataset_version": version},
+            method="GET",
         )
         if response.ok:
             response.data = DatasetStats(**response.data)
@@ -299,16 +316,18 @@ class StudioClient:
         self, name: str, version: int
     ) -> Response[DatasetExportSignedUrls]:
         return self._send_request(
-            "datachain/…
+            "datachain/datasets/export",
             {"dataset_name": name, "dataset_version": version},
+            method="GET",
         )

     def dataset_export_status(
         self, name: str, version: int
     ) -> Response[DatasetExportStatus]:
         return self._send_request(
-            "datachain/…
+            "datachain/datasets/export-status",
             {"dataset_name": name, "dataset_version": version},
+            method="GET",
         )

     def upload_file(self, file_name: str, content: bytes) -> Response[FileUploadData]:
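The pattern behind all of these endpoint changes: the Studio routes collapse onto REST-style `datachain/datasets*` paths, with the HTTP verb selecting the operation and GET payloads moving from the JSON body to query parameters. A standalone sketch of that dispatch (URL, token, and team values are placeholders; the real methods additionally handle `Response` wrapping and msgpack decoding as shown above):

from typing import Any, Optional

import requests


def send(
    base_url: str,
    token: str,
    team: str,
    route: str,
    data: dict[str, Any],
    method: Optional[str] = "POST",
) -> requests.Response:
    # GET payloads become query params; POST/DELETE payloads become JSON bodies.
    kwargs = (
        {"params": {**data, "team_name": team}}
        if method == "GET"
        else {"json": {**data, "team_name": team}}
    )
    return requests.request(
        method=method,
        url=f"{base_url}/{route}",
        headers={"Authorization": f"token {token}"},
        timeout=30,
        **kwargs,
    )


# e.g. listing datasets now hits the collection route with GET:
# send("https://studio.example.com/api", "<token>", "my-team",
#      "datachain/datasets", {}, method="GET")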
datachain/studio.py
CHANGED
@@ -155,7 +155,7 @@ def edit_studio_dataset(
     if not response.ok:
         raise_remote_error(response.message)

-    print(f"Dataset {name} updated")
+    print(f"Dataset '{name}' updated in Studio")


 def remove_studio_dataset(
@@ -169,7 +169,7 @@ def remove_studio_dataset(
     if not response.ok:
         raise_remote_error(response.message)

-    print(f"Dataset {name} removed")
+    print(f"Dataset '{name}' removed from Studio")


 def save_config(hostname, token):
{datachain-0.7.8.dist-info → datachain-0.7.9.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: datachain
-Version: 0.7.8
+Version: 0.7.9
 Summary: Wrangle unstructured AI data at scale
 Author-email: Dmitry Petrov <support@dvc.org>
 License: Apache-2.0
@@ -98,7 +98,7 @@ Requires-Dist: unstructured[embed-huggingface,pdf]<0.16.0; extra == "examples"
 Requires-Dist: pdfplumber==0.11.4; extra == "examples"
 Requires-Dist: huggingface_hub[hf_transfer]; extra == "examples"
 Requires-Dist: onnx==1.16.1; extra == "examples"
-Requires-Dist: ultralytics==8.3.…; extra == "examples"
+Requires-Dist: ultralytics==8.3.37; extra == "examples"

 ================
 |logo| DataChain
{datachain-0.7.8.dist-info → datachain-0.7.9.dist-info}/RECORD
CHANGED

@@ -2,7 +2,7 @@ datachain/__init__.py,sha256=ofPJ6B-d-ybSDRrE7J6wqF_ZRAB2W9U8l-eeuBtqPLg,865
 datachain/__main__.py,sha256=hG3Y4ARGEqe1AWwNMd259rBlqtphx1Wk39YbueQ0yV8,91
 datachain/asyn.py,sha256=5aKrjnUxk0mtnZeFKNJd1DCE0MsnSoyJBZkr0y9H_a0,9313
 datachain/cache.py,sha256=s0YHN7qurmQv-eC265TjeureK84TebWWAnL07cxchZQ,2997
-datachain/cli.py,sha256=…
+datachain/cli.py,sha256=wQiYQ_qSVCGvS06pkknT9_FIBdFRzBdeRusW9uXE3vQ,42505
 datachain/cli_utils.py,sha256=jrn9ejGXjybeO1ur3fjdSiAyCHZrX0qsLLbJzN9ErPM,2418
 datachain/config.py,sha256=g8qbNV0vW2VEKpX-dGZ9pAn0DAz6G2ZFcr7SAV3PoSM,4272
 datachain/dataset.py,sha256=P-pDBgvPqJGDhq_I7fwCfb6hY8E8mIAO8Q0NT7SNlNE,19128
@@ -14,7 +14,7 @@ datachain/nodes_fetcher.py,sha256=ILMzUW5o4_6lUOVrLDC9gJPCXfcgKnMG68plrc7dAOA,11
 datachain/nodes_thread_pool.py,sha256=uPo-xl8zG5m9YgODjPFBpbcqqHjI-dcxH87yAbj_qco,3192
 datachain/progress.py,sha256=5KotcvvzAUL_RF0GEj4JY0IB1lyImnmHxe89YkT1XO4,4330
 datachain/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-datachain/studio.py,sha256=…
+datachain/studio.py,sha256=Hr0Ha0kou0so4i8i-gWiXC1AYlJ2arI1D55cc7mi3tg,7253
 datachain/telemetry.py,sha256=0A4IOPPp9VlP5pyW9eBfaTK3YhHGzHl7dQudQjUAx9A,994
 datachain/utils.py,sha256=-mSFowjIidJ4_sMXInvNHLn4rK_QnHuIlLuH1_lMGmI,13897
 datachain/catalog/__init__.py,sha256=g2iAAFx_gEIrqshXlhSEbrc8qDaEH11cjU40n3CHDz4,409
@@ -32,7 +32,7 @@ datachain/client/s3.py,sha256=CVHBUZ1Ic2Q3370nl-Bbe69phuWjFlrVv9dTJKBpRT0,6019
 datachain/data_storage/__init__.py,sha256=9Wit-oe5P46V7CJQTD0BJ5MhOa2Y9h3ddJ4VWTe-Lec,273
 datachain/data_storage/db_engine.py,sha256=81Ol1of9TTTzD97ORajCnP366Xz2mEJt6C-kTUCaru4,3406
 datachain/data_storage/job.py,sha256=w-7spowjkOa1P5fUVtJou3OltT0L48P0RYWZ9rSJ9-s,383
-datachain/data_storage/metastore.py,sha256=…
+datachain/data_storage/metastore.py,sha256=hfTITcesE9XlUTxcCcdDyWGGep-QSjJL9DUxko5QCeI,37524
 datachain/data_storage/schema.py,sha256=-QVlRvD0dfu-ZFUxylEoSnLJLnleMEjVlcAb2OGu-AY,9895
 datachain/data_storage/serializer.py,sha256=6G2YtOFqqDzJf1KbvZraKGXl2XHZyVml2krunWUum5o,927
 datachain/data_storage/sqlite.py,sha256=D_ZQ0PHmZzHO2dinv4naVJocUDIZUwV4WAz692C1cyk,22521
@@ -53,7 +53,7 @@ datachain/lib/arrow.py,sha256=b5efxAUaNNYVwtXVJqj07D3zf5KC-BPlLCxKEZbEG6w,9429
 datachain/lib/clip.py,sha256=lm5CzVi4Cj1jVLEKvERKArb-egb9j1Ls-fwTItT6vlI,6150
 datachain/lib/data_model.py,sha256=zS4lmXHVBXc9ntcyea2a1CRLXGSAN_0glXcF88CohgY,2685
 datachain/lib/dataset_info.py,sha256=IjdF1E0TQNOq9YyynfWiCFTeZpbyGfyJvxgJY4YN810,2493
-datachain/lib/dc.py,sha256=…
+datachain/lib/dc.py,sha256=xqLR4IH_mbuet0FsxBHDsRUg-zR6tO8UZdLQQTLG8EE,89533
 datachain/lib/file.py,sha256=-XMkL6ED1sE7TMhWoMRTEuOXswZJw8X6AEmJDONFP74,15019
 datachain/lib/hf.py,sha256=a-zFpDmZIR4r8dlNNTjfpAKSnuJ9xyRXlgcdENiXt3E,5864
 datachain/lib/image.py,sha256=AMXYwQsmarZjRbPCZY3M1jDsM2WAB_b3cTY4uOIuXNU,2675
@@ -61,7 +61,7 @@ datachain/lib/listing.py,sha256=cVkCp7TRVpcZKSx-Bbk9t51bQI9Mw0o86W6ZPhAsuzM,3667
 datachain/lib/listing_info.py,sha256=9ua40Hw0aiQByUw3oAEeNzMavJYfW0Uhe8YdCTK-m_g,1110
 datachain/lib/meta_formats.py,sha256=anK2bDVbaeCCh0yvKUBaW2MVos3zRgdaSV8uSduzPcU,6680
 datachain/lib/model_store.py,sha256=DNIv8Y6Jtk1_idNLzIpsThOsdW2BMAudyUCbPUcgcxk,2515
-datachain/lib/pytorch.py,sha256=…
+datachain/lib/pytorch.py,sha256=QMJO_OGEMvBi2x71vGcG25agLzNwyLmF4Qx5iILlwaM,6350
 datachain/lib/settings.py,sha256=ZELRCTLbi5vzRPiDX6cQ9LLg9TefJ_A05gIGni0lll8,2535
 datachain/lib/signal_schema.py,sha256=_uh19nCKhiD9ua8oIN1Q8R9iYv1BZAuqTJCLYVmyW8k,24557
 datachain/lib/tar.py,sha256=3WIzao6yD5fbLqXLTt9GhPGNonbFIs_fDRu-9vgLgsA,1038
@@ -96,7 +96,7 @@ datachain/query/queue.py,sha256=waqM_KzavU8C-G95-4211Nd4GXna_u2747Chgwtgz2w,3839
 datachain/query/schema.py,sha256=b_KnVy6B26Ol4nYG0LqNNpeQ1QYPk95YRGUjXfdaQWs,6606
 datachain/query/session.py,sha256=vvLIJ5b8eElovHLAWq_CZJXmN5t7C7iAZA7x9wPPOms,5905
 datachain/remote/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-datachain/remote/studio.py,sha256=…
+datachain/remote/studio.py,sha256=WiK6fpRAw0a6Dth4XXI0YInEHH4gDU7AUHHDNd3wJzg,11616
 datachain/sql/__init__.py,sha256=6SQRdbljO3d2hx3EAVXEZrHQKv5jth0Jh98PogT59No,262
 datachain/sql/selectable.py,sha256=cTc60qVoAwqqss0Vop8Lt5Z-ROnM1XrQmL_GLjRxhXs,1765
 datachain/sql/types.py,sha256=ASSPkmM5EzdRindqj2O7WHLXq8VHAgFYedG8lYfGvVI,14045
@@ -118,9 +118,9 @@ datachain/sql/sqlite/vector.py,sha256=ncW4eu2FlJhrP_CIpsvtkUabZlQdl2D5Lgwy_cbfqR
 datachain/toolkit/__init__.py,sha256=eQ58Q5Yf_Fgv1ZG0IO5dpB4jmP90rk8YxUWmPc1M2Bo,68
 datachain/toolkit/split.py,sha256=ZgDcrNiKiPXZmKD591_1z9qRIXitu5zwAsoVPB7ykiU,2508
 datachain/torch/__init__.py,sha256=gIS74PoEPy4TB3X6vx9nLO0Y3sLJzsA8ckn8pRWihJM,579
-datachain-0.7.8.dist-info/LICENSE,sha256=8DnqK5yoPI_E50bEg_zsHKZHY2HqPy4rYN338BHQaRA,11344
-datachain-0.7.8.dist-info/METADATA,sha256=…
-datachain-0.7.8.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
-datachain-0.7.8.dist-info/entry_points.txt,sha256=0GMJS6B_KWq0m3VT98vQI2YZodAMkn4uReZ_okga9R4,49
-datachain-0.7.8.dist-info/top_level.txt,sha256=lZPpdU_2jJABLNIg2kvEOBi8PtsYikbN1OdMLHk8bTg,10
-datachain-0.7.8.dist-info/RECORD,,
+datachain-0.7.9.dist-info/LICENSE,sha256=8DnqK5yoPI_E50bEg_zsHKZHY2HqPy4rYN338BHQaRA,11344
+datachain-0.7.9.dist-info/METADATA,sha256=iu58cwfGQVYTwn53symALXVpe9292EWXdOly2MWuPZY,18006
+datachain-0.7.9.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
+datachain-0.7.9.dist-info/entry_points.txt,sha256=0GMJS6B_KWq0m3VT98vQI2YZodAMkn4uReZ_okga9R4,49
+datachain-0.7.9.dist-info/top_level.txt,sha256=lZPpdU_2jJABLNIg2kvEOBi8PtsYikbN1OdMLHk8bTg,10
+datachain-0.7.9.dist-info/RECORD,,
{datachain-0.7.8.dist-info → datachain-0.7.9.dist-info}/LICENSE
File without changes

{datachain-0.7.8.dist-info → datachain-0.7.9.dist-info}/WHEEL
File without changes

{datachain-0.7.8.dist-info → datachain-0.7.9.dist-info}/entry_points.txt
File without changes

{datachain-0.7.8.dist-info → datachain-0.7.9.dist-info}/top_level.txt
File without changes