datachain 0.8.3__py3-none-any.whl → 0.8.4__py3-none-any.whl
This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
Potentially problematic release: this version of datachain might be problematic.
- datachain/cli/__init__.py +311 -0
- datachain/cli/commands/__init__.py +29 -0
- datachain/cli/commands/datasets.py +129 -0
- datachain/cli/commands/du.py +14 -0
- datachain/cli/commands/index.py +12 -0
- datachain/cli/commands/ls.py +169 -0
- datachain/cli/commands/misc.py +28 -0
- datachain/cli/commands/query.py +53 -0
- datachain/cli/commands/show.py +38 -0
- datachain/cli/parser/__init__.py +547 -0
- datachain/cli/parser/job.py +120 -0
- datachain/cli/parser/studio.py +126 -0
- datachain/cli/parser/utils.py +63 -0
- datachain/{cli_utils.py → cli/utils.py} +27 -1
- datachain/client/fsspec.py +8 -2
- datachain/func/__init__.py +2 -2
- datachain/func/conditional.py +52 -0
- datachain/func/func.py +5 -1
- datachain/lib/arrow.py +4 -0
- datachain/lib/dc.py +3 -0
- datachain/lib/file.py +1 -1
- datachain/lib/listing.py +19 -1
- datachain/lib/signal_schema.py +89 -27
- datachain/progress.py +2 -2
- datachain/studio.py +58 -38
- datachain/utils.py +1 -1
- {datachain-0.8.3.dist-info → datachain-0.8.4.dist-info}/METADATA +5 -5
- {datachain-0.8.3.dist-info → datachain-0.8.4.dist-info}/RECORD +32 -20
- {datachain-0.8.3.dist-info → datachain-0.8.4.dist-info}/WHEEL +1 -1
- datachain/cli.py +0 -1475
- {datachain-0.8.3.dist-info → datachain-0.8.4.dist-info}/LICENSE +0 -0
- {datachain-0.8.3.dist-info → datachain-0.8.4.dist-info}/entry_points.txt +0 -0
- {datachain-0.8.3.dist-info → datachain-0.8.4.dist-info}/top_level.txt +0 -0
datachain/cli/parser/job.py
ADDED

```diff
@@ -0,0 +1,120 @@
+def add_jobs_parser(subparsers, parent_parser) -> None:
+    jobs_help = "Commands to handle the Job running with Iterative Studio"
+    jobs_description = (
+        "This will help us to run, cancel and view the status of the job in Studio. "
+    )
+    jobs_parser = subparsers.add_parser(
+        "job", parents=[parent_parser], description=jobs_description, help=jobs_help
+    )
+    jobs_subparser = jobs_parser.add_subparsers(
+        dest="cmd",
+        help="Use `DataChain studio CMD --help` to display command-specific help.",
+        required=True,
+    )
+
+    studio_run_help = "Run a job in Studio"
+    studio_run_description = "This command runs a job in Studio."
+
+    studio_run_parser = jobs_subparser.add_parser(
+        "run",
+        parents=[parent_parser],
+        description=studio_run_description,
+        help=studio_run_help,
+    )
+
+    studio_run_parser.add_argument(
+        "query_file",
+        action="store",
+        help="The query file to run.",
+    )
+
+    studio_run_parser.add_argument(
+        "--team",
+        action="store",
+        default=None,
+        help="The team to run a job for. By default, it will use team from config.",
+    )
+    studio_run_parser.add_argument(
+        "--env-file",
+        action="store",
+        help="File containing environment variables to set for the job.",
+    )
+
+    studio_run_parser.add_argument(
+        "--env",
+        nargs="+",
+        help="Environment variable. Can be specified multiple times. Format: KEY=VALUE",
+    )
+
+    studio_run_parser.add_argument(
+        "--workers",
+        type=int,
+        help="Number of workers to use for the job.",
+    )
+    studio_run_parser.add_argument(
+        "--files",
+        nargs="+",
+        help="Files to include in the job.",
+    )
+    studio_run_parser.add_argument(
+        "--python-version",
+        action="store",
+        help="Python version to use for the job (e.g. '3.9', '3.10', '3.11').",
+    )
+    studio_run_parser.add_argument(
+        "--req-file",
+        action="store",
+        help="File containing Python package requirements.",
+    )
+
+    studio_run_parser.add_argument(
+        "--req",
+        nargs="+",
+        help="Python package requirement. Can be specified multiple times.",
+    )
+
+    studio_cancel_help = "Cancel a job in Studio"
+    studio_cancel_description = "This command cancels a job in Studio."
+
+    studio_cancel_parser = jobs_subparser.add_parser(
+        "cancel",
+        parents=[parent_parser],
+        description=studio_cancel_description,
+        help=studio_cancel_help,
+    )
+
+    studio_cancel_parser.add_argument(
+        "job_id",
+        action="store",
+        help="The job ID to cancel.",
+    )
+    studio_cancel_parser.add_argument(
+        "--team",
+        action="store",
+        default=None,
+        help="The team to cancel a job for. By default, it will use team from config.",
+    )
+
+    studio_log_help = "Show the logs and latest status of Jobs in Studio"
+    studio_log_description = (
+        "This will display the logs and latest status of jobs in Studio"
+    )
+
+    studio_log_parser = jobs_subparser.add_parser(
+        "logs",
+        parents=[parent_parser],
+        description=studio_log_description,
+        help=studio_log_help,
+    )
+
+    studio_log_parser.add_argument(
+        "job_id",
+        action="store",
+        help="The job ID to show the logs.",
+    )
+    studio_log_parser.add_argument(
+        "--team",
+        action="store",
+        default=None,
+        help="The team to check the logs. By default, it will use team from config.",
+    )
```
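A quick sanity sketch of the CLI surface this adds; the root-parser wiring below is illustrative (the real wiring lives in `datachain/cli/parser/__init__.py`), but the subcommands and flags are the ones defined above:

```python
from argparse import ArgumentParser

from datachain.cli.parser.job import add_jobs_parser

root = ArgumentParser(prog="datachain")  # illustrative root parser
parent = ArgumentParser(add_help=False)  # shared flags would be registered here
add_jobs_parser(root.add_subparsers(dest="command"), parent)

args = root.parse_args(["job", "run", "my_query.py", "--workers", "2", "--env", "A=1"])
assert (args.cmd, args.query_file, args.workers, args.env) == ("run", "my_query.py", 2, ["A=1"])
```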
datachain/cli/parser/studio.py
ADDED

```diff
@@ -0,0 +1,126 @@
+def add_studio_parser(subparsers, parent_parser) -> None:
+    studio_help = "Commands to authenticate DataChain with Iterative Studio"
+    studio_description = (
+        "Authenticate DataChain with Studio and set the token. "
+        "Once this token has been properly configured,\n"
+        "DataChain will utilize it for seamlessly sharing datasets\n"
+        "and using Studio features from CLI"
+    )
+
+    studio_parser = subparsers.add_parser(
+        "studio",
+        parents=[parent_parser],
+        description=studio_description,
+        help=studio_help,
+    )
+    studio_subparser = studio_parser.add_subparsers(
+        dest="cmd",
+        help="Use `DataChain studio CMD --help` to display command-specific help.",
+        required=True,
+    )
+
+    studio_login_help = "Authenticate DataChain with Studio host"
+    studio_login_description = (
+        "By default, this command authenticates the DataChain with Studio\n"
+        "using default scopes and assigns a random name as the token name."
+    )
+    login_parser = studio_subparser.add_parser(
+        "login",
+        parents=[parent_parser],
+        description=studio_login_description,
+        help=studio_login_help,
+    )
+
+    login_parser.add_argument(
+        "-H",
+        "--hostname",
+        action="store",
+        default=None,
+        help="The hostname of the Studio instance to authenticate with.",
+    )
+    login_parser.add_argument(
+        "-s",
+        "--scopes",
+        action="store",
+        default=None,
+        help="The scopes for the authentication token. ",
+    )
+
+    login_parser.add_argument(
+        "-n",
+        "--name",
+        action="store",
+        default=None,
+        help="The name of the authentication token. It will be used to\n"
+        "identify token shown in Studio profile.",
+    )
+
+    login_parser.add_argument(
+        "--no-open",
+        action="store_true",
+        default=False,
+        help="Use authentication flow based on user code.\n"
+        "You will be presented with user code to enter in browser.\n"
+        "DataChain will also use this if it cannot launch browser on your behalf.",
+    )
+
+    studio_logout_help = "Logout user from Studio"
+    studio_logout_description = "This removes the studio token from your global config."
+
+    studio_subparser.add_parser(
+        "logout",
+        parents=[parent_parser],
+        description=studio_logout_description,
+        help=studio_logout_help,
+    )
+
+    studio_team_help = "Set the default team for DataChain"
+    studio_team_description = (
+        "Set the default team for DataChain to use when interacting with Studio."
+    )
+
+    team_parser = studio_subparser.add_parser(
+        "team",
+        parents=[parent_parser],
+        description=studio_team_description,
+        help=studio_team_help,
+    )
+    team_parser.add_argument(
+        "team_name",
+        action="store",
+        help="The name of the team to set as the default.",
+    )
+    team_parser.add_argument(
+        "--global",
+        action="store_true",
+        default=False,
+        help="Set the team globally for all DataChain projects.",
+    )
+
+    studio_token_help = "View the token datachain uses to contact Studio"  # noqa: S105 # nosec B105
+
+    studio_subparser.add_parser(
+        "token",
+        parents=[parent_parser],
+        description=studio_token_help,
+        help=studio_token_help,
+    )
+
+    studio_ls_dataset_help = "List the available datasets from Studio"
+    studio_ls_dataset_description = (
+        "This command lists all the datasets available in Studio.\n"
+        "It will show the dataset name and the number of versions available."
+    )
+
+    ls_dataset_parser = studio_subparser.add_parser(
+        "dataset",
+        parents=[parent_parser],
+        description=studio_ls_dataset_description,
+        help=studio_ls_dataset_help,
+    )
+    ls_dataset_parser.add_argument(
+        "--team",
+        action="store",
+        default=None,
+        help="The team to list datasets for. By default, it will use team from config.",
+    )
```
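One wrinkle in the `team` subcommand: `--global` (with `action="store_true"`) lands in the namespace under the name `global`, which is a Python keyword, so callers must read it with `getattr`. A minimal sketch, with the same illustrative wiring as above:

```python
from argparse import ArgumentParser

from datachain.cli.parser.studio import add_studio_parser

root = ArgumentParser(prog="datachain")
parent = ArgumentParser(add_help=False)
add_studio_parser(root.add_subparsers(dest="command"), parent)

args = root.parse_args(["studio", "team", "my-team", "--global"])
# `args.global` would be a SyntaxError; the keyword-named attribute needs getattr:
assert args.team_name == "my-team" and getattr(args, "global") is True
```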
datachain/cli/parser/utils.py
ADDED

```diff
@@ -0,0 +1,63 @@
+from argparse import Action, ArgumentParser, ArgumentTypeError
+from typing import Union
+
+from datachain.cli.utils import CommaSeparatedArgs
+
+FIND_COLUMNS = ["du", "name", "path", "size", "type"]
+
+
+def find_columns_type(
+    columns_str: str,
+    default_colums_str: str = "path",
+) -> list[str]:
+    if not columns_str:
+        columns_str = default_colums_str
+
+    return [parse_find_column(c) for c in columns_str.split(",")]
+
+
+def parse_find_column(column: str) -> str:
+    column_lower = column.strip().lower()
+    if column_lower in FIND_COLUMNS:
+        return column_lower
+    raise ArgumentTypeError(
+        f"Invalid column for find: '{column}' Options are: {','.join(FIND_COLUMNS)}"
+    )
+
+
+def add_sources_arg(parser: ArgumentParser, nargs: Union[str, int] = "+") -> Action:
+    return parser.add_argument(
+        "sources",
+        type=str,
+        nargs=nargs,
+        help="Data sources - paths to cloud storage dirs",
+    )
+
+
+def add_show_args(parser: ArgumentParser) -> None:
+    parser.add_argument(
+        "--limit",
+        action="store",
+        default=10,
+        type=int,
+        help="Number of rows to show",
+    )
+    parser.add_argument(
+        "--offset",
+        action="store",
+        default=0,
+        type=int,
+        help="Number of rows to offset",
+    )
+    parser.add_argument(
+        "--columns",
+        default=[],
+        action=CommaSeparatedArgs,
+        help="Columns to show",
+    )
+    parser.add_argument(
+        "--no-collapse",
+        action="store_true",
+        default=False,
+        help="Do not collapse the columns",
+    )
```
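The column helpers are small enough to summarize by behaviour (a sketch that only calls the functions defined above):

```python
from argparse import ArgumentTypeError

from datachain.cli.parser.utils import find_columns_type

assert find_columns_type("name,size") == ["name", "size"]
assert find_columns_type(" DU , Type ") == ["du", "type"]  # stripped and lower-cased
assert find_columns_type("") == ["path"]                   # falls back to default_colums_str
try:
    find_columns_type("bogus")
except ArgumentTypeError as exc:
    print(exc)  # Invalid column for find: 'bogus' Options are: du,name,path,size,type
```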
datachain/{cli_utils.py → cli/utils.py}
RENAMED

```diff
@@ -1,4 +1,8 @@
-
+import logging
+from argparse import SUPPRESS, Action, ArgumentError, Namespace, _AppendAction
+from typing import Optional
+
+from datachain.error import DataChainError
 
 
 class BooleanOptionalAction(Action):
@@ -70,3 +74,25 @@ class KeyValueArgs(_AppendAction): # pylint: disable=protected-access
             items[key.strip()] = value
 
         setattr(namespace, self.dest, items)
+
+
+def get_logging_level(args: Namespace) -> int:
+    if args.quiet:
+        return logging.CRITICAL
+    if args.verbose:
+        return logging.DEBUG
+    return logging.INFO
+
+
+def determine_flavors(studio: bool, local: bool, all: bool, token: Optional[str]):
+    if studio and not token:
+        raise DataChainError(
+            "Not logged in to Studio. Log in with 'datachain studio login'."
+        )
+
+    if local or studio:
+        all = False
+
+    all = all and not (local or studio)
+
+    return all, local, studio
```
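`determine_flavors` encodes a small precedence rule: an explicit `--local` or `--studio` overrides `--all`, and Studio access without a stored token fails fast. Its behaviour as a truth table (sketch):

```python
from datachain.cli.utils import determine_flavors

assert determine_flavors(studio=False, local=False, all=True, token=None) == (True, False, False)
assert determine_flavors(studio=False, local=True, all=True, token=None) == (False, True, False)
assert determine_flavors(studio=True, local=False, all=True, token="tok") == (False, False, True)
# determine_flavors(studio=True, local=False, all=False, token=None) raises DataChainError
```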
datachain/client/fsspec.py
CHANGED

```diff
@@ -215,7 +215,7 @@ class Client(ABC):
         info = await self.fs._info(
             self.get_full_path(file.path, file.version), **kwargs
         )
-        return self.info_to_file(info,
+        return self.info_to_file(info, file.path).etag
 
     def get_file_info(self, path: str, version_id: Optional[str] = None) -> "File":
         info = self.fs.info(self.get_full_path(path, version_id), version_id=version_id)
@@ -343,7 +343,7 @@ class Client(ABC):
         return self.version_path(f"{self.PREFIX}{self.name}/{rel_path}", version_id)
 
     @abstractmethod
-    def info_to_file(self, v: dict[str, Any],
+    def info_to_file(self, v: dict[str, Any], path: str) -> "File": ...
 
     def fetch_nodes(
         self,
@@ -390,6 +390,12 @@
             self.fs.open(self.get_full_path(file.path, file.version)), cb
         )  # type: ignore[return-value]
 
+    def upload(self, path: str, data: bytes) -> "File":
+        full_path = self.get_full_path(path)
+        self.fs.pipe_file(full_path, data)
+        file_info = self.fs.info(full_path)
+        return self.info_to_file(file_info, path)
+
     def download(self, file: "File", *, callback: Callback = DEFAULT_CALLBACK) -> None:
         sync(get_loop(), functools.partial(self._download, file, callback=callback))
 
```
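The new `Client.upload` is a thin composition of two fsspec calls plus the existing `info_to_file` conversion. The fsspec half, demonstrated against an in-memory filesystem standing in for cloud storage:

```python
import fsspec

fs = fsspec.filesystem("memory")            # stand-in for a cloud filesystem
fs.pipe_file("/bucket/data.txt", b"hello")  # upload(): write the bytes at the path
info = fs.info("/bucket/data.txt")          # upload(): stat the freshly written object
assert info["type"] == "file" and info["size"] == 5
```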
datachain/func/__init__.py
CHANGED

```diff
@@ -1,4 +1,4 @@
-from sqlalchemy import
+from sqlalchemy import literal
 
 from . import array, path, random, string
 from .aggregate import (
@@ -16,7 +16,7 @@ from .aggregate import (
     sum,
 )
 from .array import cosine_distance, euclidean_distance, length, sip_hash_64
-from .conditional import greatest, least
+from .conditional import case, greatest, least
 from .numeric import bit_and, bit_hamming_distance, bit_or, bit_xor, int_hash_64
 from .random import rand
 from .string import byte_hamming_distance
```
datachain/func/conditional.py
CHANGED

````diff
@@ -1,5 +1,9 @@
 from typing import Union
 
+from sqlalchemy import case as sql_case
+from sqlalchemy.sql.elements import BinaryExpression
+
+from datachain.lib.utils import DataChainParamsError
 from datachain.sql.functions import conditional
 
 from .func import ColT, Func
@@ -79,3 +83,51 @@ def least(*args: Union[ColT, float]) -> Func:
     return Func(
         "least", inner=conditional.least, cols=cols, args=func_args, result_type=int
     )
+
+
+def case(
+    *args: tuple[BinaryExpression, Union[int, float, complex, bool, str]], else_=None
+) -> Func:
+    """
+    Returns the case function that produces case expression which has a list of
+    conditions and corresponding results. Results can only be python primitives
+    like string, numbes or booleans. Result type is inferred from condition results.
+
+    Args:
+        args (tuple(BinaryExpression, value(str | int | float | complex | bool):
+            - Tuple of binary expression and values pair which corresponds to one
+              case condition - value
+        else_ (str | int | float | complex | bool): else value in case expression
+
+    Returns:
+        Func: A Func object that represents the case function.
+
+    Example:
+        ```py
+        dc.mutate(
+            res=func.case((C("num") > 0, "P"), (C("num") < 0, "N"), else_="Z"),
+        )
+        ```
+
+    Note:
+        - Result column will always be of the same type as the input columns.
+    """
+    supported_types = [int, float, complex, str, bool]
+
+    type_ = type(else_) if else_ else None
+
+    if not args:
+        raise DataChainParamsError("Missing case statements")
+
+    for arg in args:
+        if type_ and not isinstance(arg[1], type_):
+            raise DataChainParamsError("Case statement values must be of the same type")
+        type_ = type(arg[1])
+
+    if type_ not in supported_types:
+        raise DataChainParamsError(
+            f"Case supports only python literals ({supported_types}) for values"
+        )
+
+    kwargs = {"else_": else_}
+    return Func("case", inner=sql_case, args=args, kwargs=kwargs, result_type=type_)
````
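Beyond the docstring example, note the validation: every result value, and `else_` when given, must share one primitive type, which also becomes the result column type. A sketch assuming the top-level `C` and `func` aliases used in the docstring:

```python
from datachain import C, func

ok = func.case((C("num") > 0, "pos"), (C("num") < 0, "neg"), else_="zero")  # result type: str
# func.case((C("num") > 0, "pos"), (C("num") < 0, 0))
# ^ would raise DataChainParamsError: values must be of the same type
```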
datachain/func/func.py
CHANGED

```diff
@@ -35,6 +35,7 @@ class Func(Function):
         inner: Callable,
         cols: Optional[Sequence[ColT]] = None,
         args: Optional[Sequence[Any]] = None,
+        kwargs: Optional[dict[str, Any]] = None,
         result_type: Optional["DataType"] = None,
         is_array: bool = False,
         is_window: bool = False,
@@ -45,6 +46,7 @@ class Func(Function):
         self.inner = inner
         self.cols = cols or []
         self.args = args or []
+        self.kwargs = kwargs or {}
         self.result_type = result_type
         self.is_array = is_array
         self.is_window = is_window
@@ -63,6 +65,7 @@ class Func(Function):
             self.inner,
             self.cols,
             self.args,
+            self.kwargs,
             self.result_type,
             self.is_array,
             self.is_window,
@@ -333,6 +336,7 @@ class Func(Function):
             self.inner,
             self.cols,
             self.args,
+            self.kwargs,
             self.result_type,
             self.is_array,
             self.is_window,
@@ -387,7 +391,7 @@ class Func(Function):
             return col
 
         cols = [get_col(col) for col in self._db_cols]
-        func_col = self.inner(*cols, *self.args)
+        func_col = self.inner(*cols, *self.args, **self.kwargs)
 
         if self.is_window:
             if not self.window:
```
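The practical effect of the new `kwargs` slot is that keyword arguments now reach the wrapped SQL function; that is what lets `case` above hand `else_` through to SQLAlchemy. The call it ends up making, sketched with plain SQLAlchemy 2.x:

```python
import sqlalchemy as sa

# Func("case", inner=sql_case, args=whens, kwargs={"else_": "Z"}) boils down to:
expr = sa.case((sa.column("num") > 0, "P"), (sa.column("num") < 0, "N"), else_="Z")
print(expr)  # e.g. CASE WHEN num > :num_1 THEN :param_1 WHEN num < :num_2 THEN :param_2 ELSE :param_3 END
```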
datachain/lib/arrow.py
CHANGED

```diff
@@ -149,6 +149,10 @@ def infer_schema(chain: "DataChain", **kwargs) -> pa.Schema:
     for file in chain.collect("file"):
         ds = dataset(file.get_path(), filesystem=file.get_fs(), **kwargs)  # type: ignore[union-attr]
         schemas.append(ds.schema)
+    if not schemas:
+        raise ValueError(
+            "Cannot infer schema (no files to process or can't access them)"
+        )
     return pa.unify_schemas(schemas)
 
 
```
datachain/lib/dc.py
CHANGED

```diff
@@ -1882,6 +1882,9 @@ class DataChain:
                 "`nrows` only supported for csv and json formats.",
             )
 
+        if "file" not in self.schema or not self.count():
+            raise DatasetPrepareError(self.name, "no files to parse.")
+
         schema = None
         col_names = output if isinstance(output, Sequence) else None
         if col_names or not output:
```
datachain/lib/file.py
CHANGED

```diff
@@ -364,7 +364,7 @@ class File(DataModel):
 
         try:
             info = client.fs.info(client.get_full_path(self.path))
-            converted_info = client.info_to_file(info, self.
+            converted_info = client.info_to_file(info, self.path)
             return type(self)(
                 path=self.path,
                 source=self.source,
```
datachain/lib/listing.py
CHANGED

```diff
@@ -85,6 +85,24 @@ def ls(
     return dc.filter(pathfunc.parent(_file_c("path")) == path.lstrip("/").rstrip("/*"))
 
 
+def _isfile(client: "Client", path: str) -> bool:
+    """
+    Returns True if uri points to a file
+    """
+    try:
+        info = client.fs.info(path)
+        name = info.get("name")
+        # case for special simulated directories on some clouds
+        # e.g. Google creates a zero byte file with the same name as the
+        # directory with a trailing slash at the end
+        if not name or name.endswith("/"):
+            return False
+
+        return info["type"] == "file"
+    except:  # noqa: E722
+        return False
+
+
 def parse_listing_uri(uri: str, cache, client_config) -> tuple[Optional[str], str, str]:
     """
     Parsing uri and returns listing dataset name, listing uri and listing path
@@ -94,7 +112,7 @@ def parse_listing_uri(uri: str, cache, client_config) -> tuple[Optional[str], st
     storage_uri, path = Client.parse_url(uri)
     telemetry.log_param("client", client.PREFIX)
 
-    if not uri.endswith("/") and client
+    if not uri.endswith("/") and _isfile(client, uri):
         return None, f'{storage_uri}/{path.lstrip("/")}', path
     if uses_glob(path):
         lst_uri_path = posixpath.dirname(path)
```