datachain 0.19.1__py3-none-any.whl → 0.19.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- datachain/cli/parser/job.py +25 -0
- datachain/cli/parser/studio.py +11 -4
- datachain/data_storage/schema.py +23 -1
- datachain/lib/dc/listings.py +111 -0
- datachain/remote/studio.py +6 -0
- datachain/studio.py +45 -2
- {datachain-0.19.1.dist-info → datachain-0.19.3.dist-info}/METADATA +1 -1
- {datachain-0.19.1.dist-info → datachain-0.19.3.dist-info}/RECORD +12 -12
- {datachain-0.19.1.dist-info → datachain-0.19.3.dist-info}/WHEEL +0 -0
- {datachain-0.19.1.dist-info → datachain-0.19.3.dist-info}/entry_points.txt +0 -0
- {datachain-0.19.1.dist-info → datachain-0.19.3.dist-info}/licenses/LICENSE +0 -0
- {datachain-0.19.1.dist-info → datachain-0.19.3.dist-info}/top_level.txt +0 -0
datachain/cli/parser/job.py
CHANGED
@@ -51,6 +51,13 @@ def add_jobs_parser(subparsers, parent_parser) -> None:
         help="Environment variables in KEY=VALUE format",
     )
 
+    studio_run_parser.add_argument(
+        "--cluster",
+        type=str,
+        action="store",
+        help="Compute cluster to run the job on",
+    )
+
     studio_run_parser.add_argument(
         "--workers",
         type=int,
@@ -165,3 +172,21 @@ def add_jobs_parser(subparsers, parent_parser) -> None:
         default=None,
         help="Team to check logs for (default: from config)",
     )
+
+    studio_clusters_help = "List compute clusters in Studio"
+    studio_clusters_description = "List compute clusters in Studio."
+
+    studio_clusters_parser = jobs_subparser.add_parser(
+        "clusters",
+        parents=[parent_parser],
+        description=studio_clusters_description,
+        help=studio_clusters_help,
+        formatter_class=CustomHelpFormatter,
+    )
+
+    studio_clusters_parser.add_argument(
+        "--team",
+        action="store",
+        default=None,
+        help="Team to list clusters for (default: from config)",
+    )
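To make the CLI change concrete, here is a minimal, self-contained argparse sketch of the new pieces. The parser variable names mirror the diff; the `prog` string and the bare `add_parser()` calls are assumptions, since the real parsers are assembled inside `add_jobs_parser()` with parent parsers and a custom help formatter.

```python
# Minimal argparse sketch of the additions above; simplified wiring.
import argparse

parser = argparse.ArgumentParser(prog="datachain job")  # prog is an assumption
jobs_subparser = parser.add_subparsers(dest="cmd")

studio_run_parser = jobs_subparser.add_parser("run")
studio_run_parser.add_argument(
    "--cluster",
    type=str,
    action="store",
    help="Compute cluster to run the job on",
)

studio_clusters_parser = jobs_subparser.add_parser("clusters")
studio_clusters_parser.add_argument(
    "--team",
    action="store",
    default=None,
    help="Team to list clusters for (default: from config)",
)

# Namespace(cmd='run', cluster='gpu-small'); the cluster name is invented
print(parser.parse_args(["run", "--cluster", "gpu-small"]))
# Namespace(cmd='clusters', team='my-team')
print(parser.parse_args(["clusters", "--team", "my-team"]))
```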
datachain/cli/parser/studio.py
CHANGED
@@ -89,8 +89,13 @@ def add_auth_parser(subparsers, parent_parser) -> None:
         help="Remove the token from the local project config",
     )
 
-    auth_team_help = "Set default team for Studio operations"
-    auth_team_description =
+    auth_team_help = "Set or show default team for Studio operations"
+    auth_team_description = (
+        "Set or show the default team for Studio operations. "
+        "This will be used globally by default. "
+        "Use --local to set the team locally for the current project. "
+        "If no team name is provided, the default team will be shown."
+    )
 
     team_parser = auth_subparser.add_parser(
         "team",
@@ -102,13 +107,15 @@ def add_auth_parser(subparsers, parent_parser) -> None:
     team_parser.add_argument(
         "team_name",
         action="store",
+        default=None,
+        nargs="?",
         help="Name of the team to set as default",
     )
     team_parser.add_argument(
-        "--
+        "--local",
         action="store_true",
         default=False,
-        help="Set team
+        help="Set team locally for the current project",
     )
 
     auth_token_help = "View Studio authentication token"  # noqa: S105
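The `nargs="?"` addition is what makes the positional optional, so running `datachain auth team` with no argument can fall through to the show-current-team branch added in `datachain/studio.py` further down. A minimal standalone sketch of just this parsing behavior:

```python
# Sketch of the optional-positional behavior enabled by nargs="?".
import argparse

team_parser = argparse.ArgumentParser(prog="datachain auth team")
team_parser.add_argument(
    "team_name",
    action="store",
    default=None,
    nargs="?",  # positional is now optional; None means "show current team"
    help="Name of the team to set as default",
)
team_parser.add_argument(
    "--local",
    action="store_true",
    default=False,
    help="Set team locally for the current project",
)

print(team_parser.parse_args([]))                      # team_name=None -> show default
print(team_parser.parse_args(["my-team"]))             # set globally
print(team_parser.parse_args(["my-team", "--local"]))  # set for current project
```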
datachain/data_storage/schema.py
CHANGED
@@ -13,7 +13,16 @@ from sqlalchemy.sql import func as f
 from sqlalchemy.sql.expression import false, null, true
 
 from datachain.sql.functions import path as pathfunc
-from datachain.sql.types import
+from datachain.sql.types import (
+    JSON,
+    Boolean,
+    DateTime,
+    Int,
+    Int64,
+    SQLType,
+    String,
+    UInt64,
+)
 
 if TYPE_CHECKING:
     from sqlalchemy.engine.interfaces import Dialect
@@ -272,6 +281,19 @@ class DataTable:
             ),
         ]
 
+    @classmethod
+    def listing_columns(cls):
+        return [
+            sa.Column("file__source", String()),
+            sa.Column("file__path", String()),
+            sa.Column("file__size", Int64()),
+            sa.Column("file__version", String()),
+            sa.Column("file__etag", String()),
+            sa.Column("file__is_latest", Boolean()),
+            sa.Column("file__last_modified", DateTime()),
+            sa.Column("file__location", JSON()),
+        ]
+
     def dir_expansion(self):
         return DirExpansion(self.column)
 
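The new `listing_columns()` fixes the flattened `file__*` column layout for listing tables. As a rough standalone sketch (the table name is hypothetical, and datachain's SQL types are assumed to behave as ordinary SQLAlchemy column types, as their use in the diff suggests), an equivalent table definition would be:

```python
# Sketch: materializing the listing column set as a plain SQLAlchemy table.
# Column names/types come from the diff; the table name is invented.
import sqlalchemy as sa

from datachain.sql.types import JSON, Boolean, DateTime, Int64, String

metadata = sa.MetaData()
listing_table = sa.Table(
    "lst__example",  # hypothetical name
    metadata,
    sa.Column("file__source", String()),
    sa.Column("file__path", String()),
    sa.Column("file__size", Int64()),
    sa.Column("file__version", String()),
    sa.Column("file__etag", String()),
    sa.Column("file__is_latest", Boolean()),
    sa.Column("file__last_modified", DateTime()),
    sa.Column("file__location", JSON()),
)
print([c.name for c in listing_table.columns])
```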
datachain/lib/dc/listings.py
CHANGED
@@ -3,19 +3,58 @@ from typing import (
     Optional,
 )
 
+from datachain.lib.listing import LISTING_PREFIX, ls
 from datachain.lib.listing_info import ListingInfo
+from datachain.lib.settings import Settings
+from datachain.lib.signal_schema import SignalSchema
 from datachain.query import Session
+from datachain.query.dataset import DatasetQuery, QueryStep, step_result
 
 from .values import read_values
 
 if TYPE_CHECKING:
     from typing_extensions import ParamSpec
 
+    from datachain.dataset import DatasetVersion
+    from datachain.query.dataset import StepResult
+
     from .datachain import DataChain
 
 P = ParamSpec("P")
 
 
+class ReadOnlyQueryStep(QueryStep):
+    """
+    This step is used to read the dataset in read-only mode.
+    It is used to avoid the need to read the table metadata from the warehouse.
+    This is useful when we want to list the files in the dataset.
+    """
+
+    def apply(self) -> "StepResult":
+        import sqlalchemy as sa
+
+        def q(*columns):
+            return sa.select(*columns)
+
+        table_name = self.catalog.warehouse.dataset_table_name(
+            self.dataset_name, self.dataset_version
+        )
+        dataset_row_cls = self.catalog.warehouse.schema.dataset_row_cls
+        table = dataset_row_cls.new_table(
+            table_name,
+            columns=(
+                [
+                    *dataset_row_cls.sys_columns(),
+                    *dataset_row_cls.listing_columns(),
+                ]
+            ),
+        )
+
+        return step_result(
+            q, table.columns, dependencies=[(self.dataset_name, self.dataset_version)]
+        )
+
+
 def listings(
     session: Optional[Session] = None,
     in_memory: bool = False,
@@ -41,3 +80,75 @@ def listings(
         output={column: ListingInfo},
         **{column: catalog.listings()},  # type: ignore[arg-type]
     )
+
+
+def read_listing_dataset(
+    name: str,
+    version: Optional[str] = None,
+    path: str = "",
+    session: Optional["Session"] = None,
+    settings: Optional[dict] = None,
+) -> tuple["DataChain", "DatasetVersion"]:
+    """Read a listing dataset and return a DataChain and listing version.
+
+    Args:
+        name: Name of the dataset
+        version: Version of the dataset
+        path: Path within the listing to read. Path can have globs.
+        session: Optional Session object to use for reading
+        settings: Optional settings dictionary to use for reading
+
+    Returns:
+        tuple[DataChain, DatasetVersion]: A tuple containing:
+            - DataChain configured for listing files
+            - DatasetVersion object for the specified listing version
+
+    Example:
+        ```py
+        import datachain as dc
+        chain, listing_version = dc.read_listing_dataset(
+            "lst__s3://my-bucket/my-path", version="1.0.0", path="my-path"
+        )
+        chain.show()
+        ```
+    """
+    # Configure and return a DataChain for reading listing dataset files
+    # Uses ReadOnlyQueryStep to avoid warehouse metadata lookups
+    from datachain.lib.dc import Sys
+    from datachain.lib.file import File
+
+    from .datachain import DataChain
+
+    if not name.startswith(LISTING_PREFIX):
+        name = LISTING_PREFIX + name
+
+    session = Session.get(session)
+    dataset = session.catalog.get_dataset(name)
+    if version is None:
+        version = dataset.latest_version
+
+    query = DatasetQuery(
+        name=name,
+        session=session,
+        indexing_column_types=File._datachain_column_types,
+        fallback_to_studio=False,
+    )
+    if settings:
+        cfg = {**settings}
+        if "prefetch" not in cfg:
+            cfg["prefetch"] = 0
+        _settings = Settings(**cfg)
+    else:
+        _settings = Settings(prefetch=0)
+    signal_schema = SignalSchema({"sys": Sys, "file": File})
+
+    query.starting_step = ReadOnlyQueryStep(query.catalog, name, version)
+    query.version = version
+    # We already know that this is a listing dataset,
+    # so we can set the listing function to True
+    query.set_listing_fn(lambda: True)
+
+    chain = DataChain(query, _settings, signal_schema)
+    chain = ls(chain, path, recursive=True, column="file")
+
+    return chain, dataset.get_version(version)
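A hedged usage sketch of the new helper; the bucket and glob are invented, and the top-level `dc.read_listing_dataset` re-export is assumed from the docstring example above:

```python
import datachain as dc

# Hypothetical bucket. The "lst__" prefix is prepended automatically when
# missing, and version=None resolves to the latest listing version.
chain, listing_version = dc.read_listing_dataset(
    "s3://my-bucket",
    path="images/*.jpg",  # paths can contain globs
)
print(listing_version)
chain.show()
```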
datachain/remote/studio.py
CHANGED
@@ -30,6 +30,7 @@ DatasetExportSignedUrls = Optional[list[str]]
 FileUploadData = Optional[dict[str, Any]]
 JobData = Optional[dict[str, Any]]
 JobListData = dict[str, Any]
+ClusterListData = dict[str, Any]
 logger = logging.getLogger("datachain")
 
 DATASET_ROWS_CHUNK_SIZE = 8192
@@ -391,6 +392,7 @@ class StudioClient:
         requirements: Optional[str] = None,
         repository: Optional[str] = None,
         priority: Optional[int] = None,
+        cluster: Optional[str] = None,
     ) -> Response[JobData]:
         data = {
             "query": query,
@@ -403,6 +405,7 @@ class StudioClient:
             "requirements": requirements,
             "repository": repository,
             "priority": priority,
+            "compute_cluster_name": cluster,
         }
         return self._send_request("datachain/job", data)
 
@@ -423,3 +426,6 @@ class StudioClient:
     ) -> Response[JobData]:
         url = f"datachain/job/{job_id}/cancel"
         return self._send_request(url, data={}, method="POST")
+
+    def get_clusters(self) -> Response[ClusterListData]:
+        return self._send_request("datachain/clusters", {}, method="GET")
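Calling the new endpoint directly might look like this; the team name is hypothetical, and the `clusters` key in the payload matches how `list_clusters` in `datachain/studio.py` consumes the response below:

```python
from datachain.remote.studio import StudioClient

client = StudioClient(team="my-team")  # hypothetical team
response = client.get_clusters()
if response.ok:
    for cluster in response.data.get("clusters", []):
        print(cluster.get("id"), cluster.get("status"))
```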
datachain/studio.py
CHANGED
@@ -41,6 +41,7 @@ def process_jobs_args(args: "Namespace"):
         args.req,
         args.req_file,
         args.priority,
+        args.cluster,
     )
 
     if args.cmd == "cancel":
@@ -51,6 +52,9 @@ def process_jobs_args(args: "Namespace"):
     if args.cmd == "ls":
         return list_jobs(args.status, args.team, args.limit)
 
+    if args.cmd == "clusters":
+        return list_clusters(args.team)
+
     raise DataChainError(f"Unknown command '{args.cmd}'.")
 
 
@@ -68,14 +72,24 @@ def process_auth_cli_args(args: "Namespace"):
         return logout(args.local)
     if args.cmd == "token":
         return token()
-
     if args.cmd == "team":
         return set_team(args)
     raise DataChainError(f"Unknown command '{args.cmd}'.")
 
 
 def set_team(args: "Namespace"):
-
+    if args.team_name is None:
+        config = Config().read().get("studio", {})
+        team = config.get("team")
+        if team:
+            print(f"Default team is '{team}'")
+            return 0
+
+        raise DataChainError(
+            "No default team set. Use `datachain auth team <team_name>` to set one."
+        )
+
+    level = ConfigLevel.LOCAL if args.local else ConfigLevel.GLOBAL
     config = Config(level)
     with config.edit() as conf:
         studio_conf = conf.get("studio", {})
@@ -121,6 +135,7 @@ def login(args: "Namespace"):
     level = ConfigLevel.LOCAL if args.local else ConfigLevel.GLOBAL
     config_path = save_config(hostname, access_token, level=level)
     print(f"Authentication complete. Saved token to {config_path}.")
+    print("You can now use 'datachain auth team' to set the default team.")
     return 0
 
 
@@ -268,6 +283,7 @@ def create_job(
     req: Optional[list[str]] = None,
     req_file: Optional[str] = None,
     priority: Optional[int] = None,
+    cluster: Optional[str] = None,
 ):
     query_type = "PYTHON" if query_file.endswith(".py") else "SHELL"
     with open(query_file) as f:
@@ -297,6 +313,7 @@ def create_job(
         repository=repository,
         requirements=requirements,
         priority=priority,
+        cluster=cluster,
     )
     if not response.ok:
         raise DataChainError(response.message)
@@ -380,3 +397,29 @@ def show_job_logs(job_id: str, team_name: Optional[str]):
 
     client = StudioClient(team=team_name)
     show_logs_from_client(client, job_id)
+
+
+def list_clusters(team_name: Optional[str]):
+    client = StudioClient(team=team_name)
+    response = client.get_clusters()
+    if not response.ok:
+        raise DataChainError(response.message)
+
+    clusters = response.data.get("clusters", [])
+    if not clusters:
+        print("No clusters found")
+        return
+
+    rows = [
+        {
+            "ID": cluster.get("id"),
+            "Status": cluster.get("status"),
+            "Cloud Provider": cluster.get("cloud_provider"),
+            "Cloud Credentials": cluster.get("cloud_credentials"),
+            "Is Active": cluster.get("is_active"),
+            "Max Workers": cluster.get("max_workers"),
+        }
+        for cluster in clusters
+    ]
+
+    print(tabulate.tabulate(rows, headers="keys", tablefmt="grid"))
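For a sense of the output, here is the same `tabulate` call with one invented row; only the formatting mirrors `list_clusters` above:

```python
# Invented sample data; only the formatting mirrors list_clusters().
import tabulate

rows = [
    {
        "ID": "cl-123",
        "Status": "running",
        "Cloud Provider": "aws",
        "Cloud Credentials": "default",
        "Is Active": True,
        "Max Workers": 8,
    }
]
print(tabulate.tabulate(rows, headers="keys", tablefmt="grid"))
```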
{datachain-0.19.1.dist-info → datachain-0.19.3.dist-info}/RECORD
CHANGED

@@ -15,7 +15,7 @@ datachain/progress.py,sha256=lRzxoYP4Qv2XBwD78sOkmYRzHFpZ2ExVNJF8wAeICtY,770
 datachain/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 datachain/script_meta.py,sha256=V-LaFOZG84pD0Zc0NvejYdzwDgzITv6yHvAHggDCnuY,4978
 datachain/semver.py,sha256=UB8GHPBtAP3UJGeiuJoInD7SK-DnB93_Xd1qy_CQ9cU,2074
-datachain/studio.py,sha256=
+datachain/studio.py,sha256=vFRg7_8phTIRJnfbUYk-f9h6tLVy4dNQF2tgsMw2ujk,12357
 datachain/telemetry.py,sha256=0A4IOPPp9VlP5pyW9eBfaTK3YhHGzHl7dQudQjUAx9A,994
 datachain/utils.py,sha256=DNqOi-Ydb7InyWvD9m7_yailxz6-YGpZzh00biQaHNo,15305
 datachain/catalog/__init__.py,sha256=cMZzSz3VoUi-6qXSVaHYN-agxQuAcz2XSqnEPZ55crE,353
@@ -33,8 +33,8 @@ datachain/cli/commands/misc.py,sha256=c0DmkOLwcDI2YhA8ArOuLJk6aGzSMZCiKL_E2JGibV
 datachain/cli/commands/query.py,sha256=Xzfgh14nPVH-sclqX1tpZqgfdTugw5s_44v0D33z6FA,1505
 datachain/cli/commands/show.py,sha256=Cf8wBs12h-xtdOzjU5GTDy2C8rF5HJSF0hDJYER1zH8,1606
 datachain/cli/parser/__init__.py,sha256=sjCIcosAtZqa0m50GMQHqmCkZSYxKyZNwQ29XwRQlP0,15913
-datachain/cli/parser/job.py,sha256=
-datachain/cli/parser/studio.py,sha256=
+datachain/cli/parser/job.py,sha256=_wqOOxGRXG_-xuQ35FaLUOwjw6w8HviWvoEpZZ7VBzI,5289
+datachain/cli/parser/studio.py,sha256=Bo__LKM7qhJGgkyX8M_bCvgZ2Gvqq6r_X4t1NdtaBIY,3881
 datachain/cli/parser/utils.py,sha256=rETdD-9Hq9A4OolgfT7jQw4aoawtbfmkdtH6E7nkhpI,2888
 datachain/client/__init__.py,sha256=1kDpCPoibMXi1gExR4lTLc5pi-k6M5TANiwtXkPoLhU,49
 datachain/client/azure.py,sha256=7yyAgANHfu9Kfh187MKNTT1guvu9Q-WYsi4vYoY3aew,3270
@@ -48,7 +48,7 @@ datachain/data_storage/__init__.py,sha256=9Wit-oe5P46V7CJQTD0BJ5MhOa2Y9h3ddJ4VWT
 datachain/data_storage/db_engine.py,sha256=n8ojCbvVMPY2e3SG8fUaaD0b9GkVfpl_Naa_6EiHfWg,3788
 datachain/data_storage/job.py,sha256=9r0OGwh22bHNIvLHqg8_-eJSP1YYB-BN5HOla5TdCxw,402
 datachain/data_storage/metastore.py,sha256=1PaRTQbL7kjcU1BVjiLjXJLrrLzQtUvpqLmm0pwc1rU,39882
-datachain/data_storage/schema.py,sha256=
+datachain/data_storage/schema.py,sha256=o3JbURKXRg3IJyIVA4QjHHkn6byRuz7avbydU2FlvNY,9897
 datachain/data_storage/serializer.py,sha256=6G2YtOFqqDzJf1KbvZraKGXl2XHZyVml2krunWUum5o,927
 datachain/data_storage/sqlite.py,sha256=BB8x7jtBmHK9lwn2zTo4HgfTKWGF43JxOsGr38J8YV8,25698
 datachain/data_storage/warehouse.py,sha256=imPm4R2V7TkqgGNSO2FGnKu03axU9UVLMfdUPfpwgHE,31747
@@ -103,7 +103,7 @@ datachain/lib/dc/datachain.py,sha256=cQjq6_OWQ_1JKvIqb8snl6mKfuBbpllPEao5ygVINog
 datachain/lib/dc/datasets.py,sha256=g_bBGCUwAwNJypYSUQvrDDqnaw7nfXpvrEvUVPtWATY,11268
 datachain/lib/dc/hf.py,sha256=PJl2wiLjdRsMz0SYbLT-6H8b-D5i2WjeH7li8HHOk_0,2145
 datachain/lib/dc/json.py,sha256=dNijfJ-H92vU3soyR7X1IiDrWhm6yZIGG3bSnZkPdAE,2733
-datachain/lib/dc/listings.py,sha256=
+datachain/lib/dc/listings.py,sha256=8kX-eELQGHDuOAtavLRJ2iwXkdJQ2bIAv_Z5mKYDJbI,4667
 datachain/lib/dc/pandas.py,sha256=ObueUXDUFKJGu380GmazdG02ARpKAHPhSaymfmOH13E,1489
 datachain/lib/dc/parquet.py,sha256=zYcSgrWwyEDW9UxGUSVdIVsCu15IGEf0xL8KfWQqK94,1782
 datachain/lib/dc/records.py,sha256=J1I69J2gFIBjRTGr2LG-5qn_rTVzRLcr2y3tVDrmHdg,3068
@@ -131,7 +131,7 @@ datachain/query/session.py,sha256=6_ydvPasurmc5tR11dzFj51DpUAo4NxXP9p4ltoTauc,67
 datachain/query/udf.py,sha256=e753bDJzTNjGFQn1WGTvOAWSwjDbrFI1-_DDWkWN2ls,1343
 datachain/query/utils.py,sha256=HaSDNH_XGvp_NIcXjcB7j4vJRPi4_tbztDWclYelHY4,1208
 datachain/remote/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-datachain/remote/studio.py,sha256=
+datachain/remote/studio.py,sha256=SZFmTTIHH7eRnWEGT-YCHsayeCIaLfh0UiasHIiA2s4,13915
 datachain/sql/__init__.py,sha256=6SQRdbljO3d2hx3EAVXEZrHQKv5jth0Jh98PogT59No,262
 datachain/sql/selectable.py,sha256=cTc60qVoAwqqss0Vop8Lt5Z-ROnM1XrQmL_GLjRxhXs,1765
 datachain/sql/types.py,sha256=ASSPkmM5EzdRindqj2O7WHLXq8VHAgFYedG8lYfGvVI,14045
@@ -153,9 +153,9 @@ datachain/sql/sqlite/vector.py,sha256=ncW4eu2FlJhrP_CIpsvtkUabZlQdl2D5Lgwy_cbfqR
 datachain/toolkit/__init__.py,sha256=eQ58Q5Yf_Fgv1ZG0IO5dpB4jmP90rk8YxUWmPc1M2Bo,68
 datachain/toolkit/split.py,sha256=ktGWzY4kyzjWyR86dhvzw-Zhl0lVk_LOX3NciTac6qo,2914
 datachain/torch/__init__.py,sha256=gIS74PoEPy4TB3X6vx9nLO0Y3sLJzsA8ckn8pRWihJM,579
-datachain-0.19.
-datachain-0.19.
-datachain-0.19.
-datachain-0.19.
-datachain-0.19.
-datachain-0.19.
+datachain-0.19.3.dist-info/licenses/LICENSE,sha256=8DnqK5yoPI_E50bEg_zsHKZHY2HqPy4rYN338BHQaRA,11344
+datachain-0.19.3.dist-info/METADATA,sha256=cepmI_PMLRlKrdTOLwdBhl-752g3WoG41tbwvaY-jiY,13281
+datachain-0.19.3.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+datachain-0.19.3.dist-info/entry_points.txt,sha256=0GMJS6B_KWq0m3VT98vQI2YZodAMkn4uReZ_okga9R4,49
+datachain-0.19.3.dist-info/top_level.txt,sha256=lZPpdU_2jJABLNIg2kvEOBi8PtsYikbN1OdMLHk8bTg,10
+datachain-0.19.3.dist-info/RECORD,,
{datachain-0.19.1.dist-info → datachain-0.19.3.dist-info}/WHEEL
File without changes
{datachain-0.19.1.dist-info → datachain-0.19.3.dist-info}/entry_points.txt
File without changes
{datachain-0.19.1.dist-info → datachain-0.19.3.dist-info}/licenses/LICENSE
File without changes
{datachain-0.19.1.dist-info → datachain-0.19.3.dist-info}/top_level.txt
File without changes