megfile 5.0.1__py3-none-any.whl → 5.0.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- megfile/__init__.py +44 -21
- megfile/cli.py +14 -1
- megfile/errors.py +39 -9
- megfile/pathlike.py +11 -3
- megfile/version.py +1 -1
- {megfile-5.0.1.dist-info → megfile-5.0.3.dist-info}/METADATA +1 -1
- {megfile-5.0.1.dist-info → megfile-5.0.3.dist-info}/RECORD +12 -12
- {megfile-5.0.1.dist-info → megfile-5.0.3.dist-info}/WHEEL +0 -0
- {megfile-5.0.1.dist-info → megfile-5.0.3.dist-info}/entry_points.txt +0 -0
- {megfile-5.0.1.dist-info → megfile-5.0.3.dist-info}/licenses/LICENSE +0 -0
- {megfile-5.0.1.dist-info → megfile-5.0.3.dist-info}/licenses/LICENSE.pyre +0 -0
- {megfile-5.0.1.dist-info → megfile-5.0.3.dist-info}/top_level.txt +0 -0
megfile/__init__.py
CHANGED
@@ -1,5 +1,5 @@
 import megfile.config  # noqa: F401  # make sure env config is loaded
-from megfile.fs_path import FSPath, is_fs
+from megfile.fs_path import FSPath, fs_copy, is_fs
 from megfile.hdfs_path import HdfsPath, is_hdfs
 from megfile.http_path import HttpPath, HttpsPath, is_http
 from megfile.s3_path import (
@@ -7,13 +7,26 @@ from megfile.s3_path import (
     is_s3,
     s3_buffered_open,
     s3_cached_open,
+    s3_concat,
+    s3_copy,
+    s3_download,
+    s3_load_content,
     s3_memory_open,
     s3_open,
     s3_pipe_open,
     s3_prefetch_open,
     s3_share_cache_open,
+    s3_upload,
+)
+from megfile.sftp_path import (
+    SftpPath,
+    is_sftp,
+    sftp_add_host_key,
+    sftp_concat,
+    sftp_copy,
+    sftp_download,
+    sftp_upload,
 )
-from megfile.sftp_path import SftpPath, is_sftp, sftp_add_host_key
 from megfile.smart import (
     smart_access,
     smart_cache,
@@ -73,9 +86,12 @@ except ImportError:
 __all__ = [
     "smart_access",
     "smart_cache",
+    "smart_cache",
     "smart_combine_open",
+    "smart_concat",
     "smart_copy",
     "smart_exists",
+    "smart_getmd5",
     "smart_getmtime",
     "smart_getsize",
     "smart_glob_stat",
@@ -86,55 +102,62 @@ __all__ = [
     "smart_islink",
     "smart_listdir",
     "smart_load_content",
-    "smart_save_content",
     "smart_load_from",
     "smart_load_text",
-    "
+    "smart_lstat",
     "smart_makedirs",
+    "smart_move",
     "smart_open",
     "smart_path_join",
+    "smart_readlink",
     "smart_realpath",
     "smart_remove",
-    "smart_move",
     "smart_rename",
     "smart_save_as",
+    "smart_save_content",
+    "smart_save_text",
     "smart_scan_stat",
     "smart_scan",
     "smart_scandir",
     "smart_stat",
+    "smart_symlink",
     "smart_sync",
     "smart_touch",
     "smart_unlink",
     "smart_walk",
-    "
-    "
-    "
-    "
-    "smart_lstat",
-    "smart_concat",
+    "is_fs",
+    "fs_copy",
+    "is_hdfs",
+    "is_http",
     "is_s3",
     "s3_buffered_open",
     "s3_cached_open",
+    "s3_concat",
+    "s3_copy",
+    "s3_download",
+    "s3_load_content",
     "s3_memory_open",
     "s3_open",
     "s3_pipe_open",
     "s3_prefetch_open",
     "s3_share_cache_open",
-    "
-    "is_http",
-    "is_stdio",
-    "stdio_open",
+    "s3_upload",
     "is_sftp",
     "sftp_add_host_key",
-    "
+    "sftp_concat",
+    "sftp_copy",
+    "sftp_download",
+    "sftp_upload",
+    "is_stdio",
+    "stdio_open",
     "is_webdav",
-    "WebdavPath",
-    "S3Path",
     "FSPath",
+    "HdfsPath",
     "HttpPath",
     "HttpsPath",
-    "
-    "SmartPath",
+    "S3Path",
     "SftpPath",
-    "
+    "SmartPath",
+    "StdioPath",
+    "WebdavPath",
 ]
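With the imports and the expanded __all__ above, the backend helpers (fs_copy, the s3_* transfer and content functions, the sftp_* transfer functions) are re-exported from the top-level megfile package alongside the smart_* API. A short usage sketch, assuming megfile >= 5.0.3 and purely illustrative paths and credentials:

from megfile import fs_copy, s3_download, sftp_upload, smart_copy

fs_copy("/tmp/a.txt", "/tmp/b.txt")                       # local -> local
s3_download("s3://bucket/key.bin", "/tmp/key.bin")        # s3 -> local
sftp_upload("/tmp/key.bin", "sftp://user@host/key.bin")   # local -> sftp
smart_copy("/tmp/a.txt", "s3://bucket/a.txt")             # backend picked from the scheme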
megfile/cli.py
CHANGED
@@ -92,6 +92,8 @@ def safe_cli():  # pragma: no cover


 def get_echo_path(file_stat, base_path: str = "", full: bool = False):
+    if base_path.startswith("file://"):
+        base_path = base_path[7:]
     if base_path == file_stat.path:
         path = file_stat.name
     elif full:
@@ -150,6 +152,8 @@ def _sftp_prompt_host_key(path):

 def _ls(path: str, long: bool, full: bool, recursive: bool, human_readable: bool):
     base_path = path
+    if path == "file://":
+        path = "./"
     if has_magic(path):
         scan_func = smart_glob_stat
         base_path = get_non_glob_dir(path)
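Taken together, the two hunks above make the CLI treat file:// URLs like plain local paths: get_echo_path strips the scheme before comparing paths, and _ls maps a bare file:// to the current directory. A standalone sketch of that normalization (strip_file_scheme is a hypothetical helper, not a megfile API):

def strip_file_scheme(path: str) -> str:
    # len("file://") == 7, matching the base_path[7:] slice in the diff
    if path.startswith("file://"):
        path = path[7:]
    # a bare "file://" leaves an empty string, which _ls treats as "./"
    return path or "./"

assert strip_file_scheme("file:///tmp/data") == "/tmp/data"
assert strip_file_scheme("file://") == "./"
assert strip_file_scheme("s3://bucket/key") == "s3://bucket/key"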
@@ -186,7 +190,7 @@ class PathType(ParamType):
     name = "path"

     def shell_complete(self, ctx, param, incomplete):
-        if
+        if not incomplete:
             completions = [
                 CompletionItem(f"{protocol}://")
                 for protocol in SmartPath._registered_protocols
@@ -196,6 +200,15 @@ class PathType(ParamType):
                     continue
                 completions.append(CompletionItem(f"s3+{name}://"))
             return completions
+        if incomplete.startswith("file://"):
+            return [
+                CompletionItem(
+                    f"file://{entry.path}/"
+                    if entry.is_dir()
+                    else f"file://{entry.path}"
+                )
+                for entry in islice(smart_glob_stat(incomplete[7:] + "*"), 128)
+            ]
         try:
             return [
                 CompletionItem(f"{entry.path}/" if entry.is_dir() else entry.path)
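The new branch gives shell completion for file:// arguments by globbing the local part and re-attaching the scheme, capped at 128 candidates via islice. A rough equivalent outside Click, reusing megfile's smart_glob_stat (complete_file_scheme itself is illustrative, not a megfile API):

from itertools import islice

from megfile import smart_glob_stat

def complete_file_scheme(incomplete: str, limit: int = 128) -> list:
    # e.g. incomplete = "file:///tmp/pro" globs "/tmp/pro*"
    entries = islice(smart_glob_stat(incomplete[7:] + "*"), limit)
    return [
        f"file://{entry.path}/" if entry.is_dir() else f"file://{entry.path}"
        for entry in entries
    ]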
megfile/errors.py
CHANGED
@@ -182,7 +182,7 @@ def patch_method(
                 f"Cannot handle error {full_error_message(error)} "
                 f"after {retries} tries"
             )
-            raise
+            raise MaxRetriesExceededError(error, retries=retries)
         retry_interval = min(0.1 * 2**retries, 30)
         _logger.info(
             f"unknown error encountered: {full_error_message(error)}, "
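With this change the retry loop raises a dedicated MaxRetriesExceededError (defined in the next hunk) instead of re-raising the last error bare, while keeping the exponential backoff capped by min(0.1 * 2**retries, 30). A simplified sketch of that control flow (simple_retry is illustrative, not megfile's patch_method; it assumes megfile >= 5.0.3 for the import):

import time

from megfile.errors import MaxRetriesExceededError  # added in 5.0.3

def simple_retry(func, max_retries: int = 10):
    for retries in range(1, max_retries + 1):
        try:
            return func()
        except Exception as error:
            if retries == max_retries:
                # wrap the final failure instead of re-raising it bare
                raise MaxRetriesExceededError(error, retries=retries)
            # capped exponential backoff, same formula as the diff
            time.sleep(min(0.1 * 2**retries, 30))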
@@ -209,14 +209,34 @@ def _create_missing_ok_generator(generator, missing_ok: bool, error: Exception):
     return create_generator()


-class
-    def __init__(self, error: Exception,
-
-
+class MaxRetriesExceededError(Exception):
+    def __init__(self, error: Exception, retries: int = 1):
+        while isinstance(error, MaxRetriesExceededError):
+            retries *= error.retries
+            error = error.__cause__
+        message = "Max retires exceeded: %s, after %d tries" % (
             full_error_message(error),
+            retries,
         )
+        super().__init__(message)
+        self.retries = retries
+        self.__cause__ = error
+
+    def __reduce__(self):
+        return (self.__class__, (self.__cause__, self.retries))
+
+
+class UnknownError(Exception):
+    def __init__(self, error: Exception, path: PathLike, extra: Optional[str] = None):
+        parts = [f"Unknown error encountered: {path!r}"]
+        if isinstance(error, MaxRetriesExceededError):
+            parts.append(f"error: {full_error_message(error.__cause__)}")
+            parts.append(f"after {error.retries} tries")
+        else:
+            parts.append(f"error: {full_error_message(error)}")
         if extra is not None:
-
+            parts.append(extra)
+        message = ", ".join(parts)
         super().__init__(message)
         self.path = path
         self.extra = extra
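The constructor above flattens nested MaxRetriesExceededError instances by multiplying their retry counts and keeping the innermost cause, and __reduce__ keeps the exception picklable. A small demonstration of that behaviour, assuming megfile >= 5.0.3:

import pickle

from megfile.errors import MaxRetriesExceededError

inner = MaxRetriesExceededError(ValueError("boom"), retries=3)
outer = MaxRetriesExceededError(inner, retries=2)

assert outer.retries == 6                       # 2 * 3, nesting is flattened
assert isinstance(outer.__cause__, ValueError)  # innermost error is kept

restored = pickle.loads(pickle.dumps(outer))    # round-trips via __reduce__
assert restored.retries == 6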
@@ -350,6 +370,8 @@ def translate_fs_error(fs_error: Exception, fs_path: PathLike) -> Exception:
         if fs_error.filename is None:
             fs_error.filename = fs_path
         return fs_error
+    if isinstance(fs_error, MaxRetriesExceededError):
+        return fs_error.__cause__
     return fs_error
@@ -359,7 +381,10 @@ def translate_s3_error(s3_error: Exception, s3_url: PathLike) -> Exception:
     """
     if isinstance(s3_error, S3Exception):
         return s3_error
-
+    ori_error = s3_error
+    if isinstance(s3_error, MaxRetriesExceededError):
+        s3_error = s3_error.__cause__
+    if isinstance(s3_error, ClientError):
         code = client_error_code(s3_error)
         if code in ("NoSuchBucket"):
             bucket_or_url = (
@@ -419,7 +444,7 @@
             return S3InvalidRangeError("Invalid range: %r" % s3_url)
         elif "AccessDenied" in str(s3_error):
             return S3PermissionError("Access denied: %r" % s3_url)
-    return S3UnknownError(
+    return S3UnknownError(ori_error, s3_url)


 def translate_http_error(http_error: Exception, http_url: str) -> Exception:
@@ -434,13 +459,16 @@ def translate_http_error(http_error: Exception, http_url: str) -> Exception:
     """
     if isinstance(http_error, HttpException):
         return http_error
+    ori_error = http_error
+    if isinstance(http_error, MaxRetriesExceededError):
+        http_error = http_error.__cause__
     if isinstance(http_error, HTTPError):
         status_code = http_error.response.status_code
         if status_code == 401 or status_code == 403:
             return HttpPermissionError("Permission denied: %r" % http_url)
         elif status_code == 404:
             return HttpFileNotFoundError("No such file: %r" % http_url)
-    return HttpUnknownError(
+    return HttpUnknownError(ori_error, http_url)


 @contextmanager
@@ -476,6 +504,8 @@ def translate_hdfs_error(hdfs_error: Exception, hdfs_path: PathLike) -> Exception:
     elif hdfs_error.status_code == 404:
         return FileNotFoundError(f"No match file: {hdfs_path}")
     # pytype: enable=attribute-error
+    if isinstance(hdfs_error, MaxRetriesExceededError):
+        return hdfs_error.__cause__
     return hdfs_error
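All four translate_* helpers now unwrap a MaxRetriesExceededError to its original cause before classifying it, so callers still receive a backend-specific exception rather than the retry wrapper. The shared step in isolation (unwrap_retry_error is a generic illustration, not a megfile function):

from megfile.errors import MaxRetriesExceededError

def unwrap_retry_error(error: Exception) -> Exception:
    # the step each translate_* helper applies before mapping the error
    # to FileNotFoundError, PermissionError, and friends
    if isinstance(error, MaxRetriesExceededError):
        return error.__cause__
    return error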
megfile/pathlike.py
CHANGED
@@ -565,7 +565,7 @@ class BasePath:

     @cached_property
     def anchor(self) -> str:
-        return self.root
+        return self.root

     def joinpath(self: Self, *other_paths: "PathLike") -> Self:
         """
@@ -863,7 +863,7 @@ class URIPath(BasePath):
         path = path.lstrip("/")
         if path != "":
             parts.extend(path.split("/"))
-        return tuple(parts)
+        return tuple(parts)

     @cached_property
     def parents(self) -> "URIPathParents":
@@ -917,10 +917,18 @@ class URIPathParents(Sequence):
         self.parts = parts

     def __len__(self):
+        if (
+            (self.prefix == "" or "://" in self.prefix)
+            and len(self.parts) > 0
+            and self.parts[0] != "/"
+        ):
+            return len(self.parts)
         return max(len(self.parts) - 1, 0)

     def __getitem__(self, idx):
-        if idx < 0
+        if idx < 0:
+            idx += len(self)
+        if idx < 0 or idx >= len(self):
             raise IndexError(idx)

         if len(self.parts[: -idx - 1]) > 1:
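The __getitem__ change lets URIPathParents accept negative indices by wrapping them once against len(self) before the bounds check, and __len__ now counts one extra parent for non-rooted paths (empty or scheme-style prefix). The index handling in isolation (normalize_index is a generic illustration, not a megfile function):

def normalize_index(idx: int, length: int) -> int:
    # mirror of the new guard: wrap a negative index once, then bounds-check
    if idx < 0:
        idx += length
    if idx < 0 or idx >= length:
        raise IndexError(idx)
    return idx

assert normalize_index(0, 3) == 0
assert normalize_index(-1, 3) == 2   # parents[-1] -> last parent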
megfile/version.py
CHANGED
@@ -1 +1 @@
-VERSION = "5.0.1"
+VERSION = "5.0.3"
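A quick way to confirm which release is installed, using the VERSION constant shown above:

import megfile.version

print(megfile.version.VERSION)  # expected to print "5.0.3" for this release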
{megfile-5.0.1.dist-info → megfile-5.0.3.dist-info}/RECORD
CHANGED
@@ -1,19 +1,19 @@
-megfile/__init__.py,sha256=
-megfile/cli.py,sha256=
+megfile/__init__.py,sha256=4XbMsR-lM7MxbnPGBI16m2sq6ghuA2-zZj2XF4bbX2Y,3291
+megfile/cli.py,sha256=TjUhfcOB_weEBa_e55nmiajq_XObAhQa7sx5oivrQCw,30620
 megfile/config.py,sha256=K3B_o2dnI7qGsGnK8Jg18-S5YYLYuzskfNJowlSMkQM,5065
-megfile/errors.py,sha256=
+megfile/errors.py,sha256=zKwM5r5j89mlbWZNeax26Hq63NmQhl9iGMfTtgyvYNA,16830
 megfile/fs_path.py,sha256=tt2__W6E4vep0lmVreTLIW63njl-EzyQEEkEGziyAb4,41015
 megfile/hdfs_path.py,sha256=OmUe3vA3Qoxnqtcq0Rs3ygBvzAtqUz3fGo8iP5sWneE,26058
 megfile/http_path.py,sha256=08OmzmRMyLSyq1Yr1K2HbzexesURJrIoA6AibwYzUiA,13844
 megfile/interfaces.py,sha256=XU46U5pl4k1Gse63i4z5SvxcjWeKLj0xyB0Y6fYiWWo,9887
-megfile/pathlike.py,sha256=
+megfile/pathlike.py,sha256=4RuYHqUc5_6rZDCcVo_18il0Hy7BlOYt-rtYwCtp9Gg,31446
 megfile/s3_path.py,sha256=LINHnHnpesXnf9wxbV6n0xQVT0wPwyjLc7xAasakefU,94467
 megfile/sftp2_path.py,sha256=K90bnMVAx0MQPGXP6LogGuDRzaD4MPR6lMOfdY9C9-0,37942
 megfile/sftp_path.py,sha256=_KU7_-Mq2m7lcLY1mpiGrju0SP-OsdEXlRjFhZH25UA,51223
 megfile/smart.py,sha256=Lab2jxprj-zvPw5GqUWlWiEY8bcpRlviks_qp9r-km8,38224
 megfile/smart_path.py,sha256=kGidkM5S58ChE3LVZMcUACs3IQgsqh9m04sp6-wxuhk,12615
 megfile/stdio_path.py,sha256=cxaDr8rtisTPnN-rjtaEpqQnshwiqwXFUJBM9xWY7Cg,2711
-megfile/version.py,sha256=
+megfile/version.py,sha256=ayvDTXMtNAcyGKXxogED3ZAvUOOsfQYG6VqgcDYL7o8,19
 megfile/webdav_path.py,sha256=xQmZMt-hDA7PfHzuSjRYaIoJA_Nbi1jsg952KZJhs-E,31364
 megfile/lib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 megfile/lib/base_memory_handler.py,sha256=i5-YHpL0k2tuFBnP9VMzb3_OsaU6D6j6thkmWgptnFg,2680
@@ -42,10 +42,10 @@ megfile/lib/webdav_memory_handler.py,sha256=_UccPYPpvfTd4gSEhBFL1BHeyFtsBJdhVINk
 megfile/lib/webdav_prefetch_reader.py,sha256=M0X6E6t-DS5q9KiLvjVZx_AZuiW9SaIkBnIPLc774GQ,3941
 megfile/utils/__init__.py,sha256=4hBVSXbNTbDj7Je0y9SbwgcPm_s41H9v3eHUMr9JNGo,12700
 megfile/utils/mutex.py,sha256=asb8opGLgK22RiuBJUnfsvB8LnMmodP8KzCVHKmQBWA,2561
-megfile-5.0.
-megfile-5.0.
-megfile-5.0.
-megfile-5.0.
-megfile-5.0.
-megfile-5.0.
-megfile-5.0.
+megfile-5.0.3.dist-info/licenses/LICENSE,sha256=WNHhf_5RCaeuKWyq_K39vmp9F28LxKsB4SpomwSZ2L0,11357
+megfile-5.0.3.dist-info/licenses/LICENSE.pyre,sha256=9lf5nT-5ZH25JijpYAequ0bl8E8z5JmZB1qrjiUMp84,1080
+megfile-5.0.3.dist-info/METADATA,sha256=qc9JcTR52y09F_lY2fiCuwCkeRzfekculMSNeXBEP_g,9225
+megfile-5.0.3.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+megfile-5.0.3.dist-info/entry_points.txt,sha256=M6ZWSSv5_5_QtIpZafy3vq7WuOJ_5dSGQQnEZbByt2Q,49
+megfile-5.0.3.dist-info/top_level.txt,sha256=i3rMgdU1ZAJekAceojhA-bkm3749PzshtRmLTbeLUPQ,8
+megfile-5.0.3.dist-info/RECORD,,
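Each RECORD entry above follows the standard wheel format: the file path, sha256= plus the URL-safe base64 digest with padding stripped, and the file size in bytes. A small sketch for reproducing an entry locally (the path argument is illustrative):

import base64
import hashlib
import os

def record_entry(path: str) -> str:
    # path, urlsafe-base64 sha256 digest without "=" padding, size in bytes
    with open(path, "rb") as f:
        digest = hashlib.sha256(f.read()).digest()
    encoded = base64.urlsafe_b64encode(digest).rstrip(b"=").decode()
    return f"{path},sha256={encoded},{os.path.getsize(path)}"

# e.g. record_entry("megfile/version.py") should reproduce the line above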