megfile 4.2.4__py3-none-any.whl → 5.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- megfile/__init__.py +16 -291
- megfile/cli.py +37 -20
- megfile/config.py +10 -1
- megfile/errors.py +2 -2
- megfile/fs_path.py +78 -12
- megfile/interfaces.py +44 -0
- megfile/lib/base_memory_handler.py +92 -0
- megfile/lib/glob.py +3 -3
- megfile/lib/http_prefetch_reader.py +22 -22
- megfile/lib/joinpath.py +13 -0
- megfile/lib/s3_buffered_writer.py +13 -0
- megfile/lib/s3_limited_seekable_writer.py +2 -0
- megfile/lib/s3_memory_handler.py +14 -81
- megfile/lib/webdav_memory_handler.py +83 -0
- megfile/lib/webdav_prefetch_reader.py +115 -0
- megfile/pathlike.py +3 -4
- megfile/s3_path.py +44 -33
- megfile/sftp2_path.py +44 -62
- megfile/sftp_path.py +239 -2
- megfile/smart.py +70 -29
- megfile/smart_path.py +181 -85
- megfile/version.py +1 -1
- megfile/webdav_path.py +952 -0
- {megfile-4.2.4.dist-info → megfile-5.0.0.dist-info}/METADATA +30 -39
- megfile-5.0.0.dist-info/RECORD +51 -0
- megfile/fs.py +0 -614
- megfile/hdfs.py +0 -408
- megfile/http.py +0 -114
- megfile/s3.py +0 -540
- megfile/sftp.py +0 -821
- megfile/sftp2.py +0 -827
- megfile/stdio.py +0 -30
- megfile-4.2.4.dist-info/RECORD +0 -54
- {megfile-4.2.4.dist-info → megfile-5.0.0.dist-info}/WHEEL +0 -0
- {megfile-4.2.4.dist-info → megfile-5.0.0.dist-info}/entry_points.txt +0 -0
- {megfile-4.2.4.dist-info → megfile-5.0.0.dist-info}/licenses/LICENSE +0 -0
- {megfile-4.2.4.dist-info → megfile-5.0.0.dist-info}/licenses/LICENSE.pyre +0 -0
- {megfile-4.2.4.dist-info → megfile-5.0.0.dist-info}/top_level.txt +0 -0
megfile/s3_path.py
CHANGED
@@ -93,7 +93,6 @@ __all__ = [
     "get_endpoint_url",
     "get_s3_session",
     "get_s3_client",
-    "s3_path_join",
     "is_s3",
     "s3_buffered_open",
     "s3_cached_open",
@@ -104,6 +103,7 @@ __all__ = [
     "s3_open",
     "S3Cacher",
     "s3_upload",
+    "s3_copy",
     "s3_download",
     "s3_load_content",
     "s3_concat",
@@ -230,7 +230,7 @@ def get_endpoint_url(profile_name: Optional[str] = None) -> str:
         config_endpoint_url = config.get("s3", {}).get("endpoint_url")
         config_endpoint_url = config_endpoint_url or config.get("endpoint_url")
         if config_endpoint_url:
-            warning_endpoint_url("~/.aws/config", config_endpoint_url)
+            warning_endpoint_url("~/.aws/config or ~/.aws/credentials", config_endpoint_url)
             return config_endpoint_url
     return endpoint_url

@@ -351,7 +351,7 @@ def get_s3_client_with_cache(
     )


-def s3_path_join(path: PathLike, *other_paths: PathLike) -> str:
+def _s3_path_join(path: PathLike, *other_paths: PathLike) -> str:
     """
     Concat 2 or more path to a complete path

@@ -366,7 +366,7 @@ def s3_path_join(path: PathLike, *other_paths: PathLike) -> str:
     and will directly concat.

     e.g. os.path.join('/path', 'to', '/file') => '/file',
-    but s3_path_join('/path', 'to', '/file') => '/path/to/file'
+    but _s3_path_join('/path', 'to', '/file') => '/path/to/file'
     """
     return uri_join(fspath(path), *map(fspath, other_paths))

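The renamed helper keeps the uri_join semantics described in the docstring: a later component starting with "/" does not reset the result, unlike os.path.join. A minimal sketch of that contrast, assuming uri_join is importable from megfile.lib.joinpath (the module listed in the summary above); the expected values in the comments follow the docstring and the globbing code's usage:

    import os.path
    from megfile.lib.joinpath import uri_join  # assumed import path

    print(os.path.join("/path", "to", "/file"))    # -> /file (later absolute part wins)
    print(uri_join("/path", "to", "/file"))        # -> /path/to/file (per the docstring above)
    print(uri_join("s3://", "bucket", "a/b.txt"))  # -> s3://bucket/a/b.txt (how the glob code builds paths)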
@@ -603,7 +603,7 @@ def _s3_glob_stat_single_path(
     with raise_s3_error(_s3_pathname, S3BucketNotFoundError):
         for resp in _list_objects_recursive(client, bucket, prefix, delimiter):
             for content in resp.get("Contents", []):
-                path = s3_path_join(f"{protocol}://", bucket, content["Key"])
+                path = _s3_path_join(f"{protocol}://", bucket, content["Key"])
                 if not search_dir and pattern.match(path):
                     if path.endswith("/"):
                         continue
@@ -619,7 +619,7 @@ def _s3_glob_stat_single_path(
                         )
                         dirname = os.path.dirname(dirname)
             for common_prefix in resp.get("CommonPrefixes", []):
-                path = s3_path_join(
+                path = _s3_path_join(
                     f"{protocol}://", bucket, common_prefix["Prefix"]
                 )
                 dirname = os.path.dirname(path)
@@ -640,7 +640,7 @@ def _s3_scan_pairs(
     for src_file_path in S3Path(src_url).scan():
         content_path = src_file_path[len(fspath(src_url)) :]
         if len(content_path) > 0:
-            dst_file_path = s3_path_join(dst_url, content_path)
+            dst_file_path = _s3_path_join(dst_url, content_path)
         else:
             dst_file_path = dst_url
         yield src_file_path, dst_file_path
@@ -937,6 +937,7 @@ def s3_buffered_open(
     buffered: bool = False,
     share_cache_key: Optional[str] = None,
     cache_path: Optional[str] = None,
+    atomic: bool = False,
 ) -> IO:
     """Open an asynchronous prefetch reader, to support fast sequential read

@@ -1045,6 +1046,7 @@ def s3_buffered_open(
             block_size=block_size,
             max_buffer_size=max_buffer_size,
             profile_name=s3_url._profile_name,
+            atomic=atomic,
         )
     else:
         if max_buffer_size is None:
@@ -1057,6 +1059,7 @@ def s3_buffered_open(
             block_size=block_size,
             max_buffer_size=max_buffer_size,
             profile_name=s3_url._profile_name,
+            atomic=atomic,
         )
     if buffered or _is_pickle(writer):
         writer = io.BufferedWriter(writer) # type: ignore
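A usage sketch of the new atomic flag, assuming the usual (path, mode) calling convention for s3_buffered_open; the flag is only forwarded to the writer constructed in the hunks above, and its actual effect lives in s3_buffered_writer.py (also changed in this release), not shown here:

    from megfile.s3_path import s3_buffered_open

    # "s3://my-bucket/output.bin" is a placeholder object; atomic=True is passed
    # through to the S3 writer's constructor as shown above.
    with s3_buffered_open("s3://my-bucket/output.bin", mode="wb", atomic=True) as f:
        f.write(b"payload")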
@@ -1100,6 +1103,27 @@ def s3_memory_open(
 s3_open = s3_buffered_open


+def s3_copy(
+    src_url: PathLike,
+    dst_url: PathLike,
+    callback: Optional[Callable[[int], None]] = None,
+    followlinks: bool = False,
+    overwrite: bool = True,
+) -> None:
+    """File copy on S3
+    Copy content of file on `src_path` to `dst_path`.
+    It's caller's responsibility to ensure the s3_isfile(src_url) is True
+
+    :param src_url: Given path
+    :param dst_path: Target file path
+    :param callback: Called periodically during copy, and the input parameter is
+        the data size (in bytes) of copy since the last call
+    :param followlinks: False if regard symlink as file, else True
+    :param overwrite: whether or not overwrite file when exists, default is True
+    """
+    return S3Path(src_url).copy(dst_url, callback, followlinks, overwrite)
+
+
 def s3_download(
     src_url: PathLike,
     dst_url: PathLike,
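A short usage sketch of the new s3_copy wrapper defined above; the bucket and key names are placeholders:

    from megfile.s3_path import s3_copy

    def report(nbytes: int) -> None:
        # Receives the number of bytes copied since the previous callback.
        print(f"copied {nbytes} bytes")

    s3_copy(
        "s3://my-bucket/data/input.bin",    # placeholder source object
        "s3://my-bucket/backup/input.bin",  # placeholder destination object
        callback=report,
        overwrite=True,
    )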
@@ -1117,8 +1141,7 @@ def s3_download(
     :param followlinks: False if regard symlink as file, else True
     :param overwrite: whether or not overwrite file when exists, default is True
     """
-    from megfile.fs import is_fs
-    from megfile.fs_path import FSPath
+    from megfile.fs_path import FSPath, is_fs

     dst_url = fspath(dst_url)
     if not is_fs(dst_url):
@@ -1198,8 +1221,7 @@ def s3_upload(
     :param followlinks: False if regard symlink as file, else True
     :param overwrite: whether or not overwrite file when exists, default is True
     """
-    from megfile.fs import is_fs
-    from megfile.fs_path import FSPath
+    from megfile.fs_path import FSPath, is_fs

     if not is_fs(src_url):
         raise OSError(f"src_url is not fs path: {src_url}")
@@ -1396,11 +1418,9 @@ class S3Path(URIPath):
     @cached_property
     def path_with_protocol(self) -> str:
         """Return path with protocol, like file:///root, s3://bucket/key"""
-
-
-
-            return path
-        return protocol_prefix + path.lstrip("/")
+        if self.path.startswith(self.root):
+            return self.path
+        return self.root + self.path.lstrip("/")

     @cached_property
     def path_without_protocol(self) -> str:
@@ -1408,21 +1428,14 @@ class S3Path(URIPath):
         Return path without protocol, example: if path is s3://bucket/key,
         return bucket/key
         """
-
-
-
-        path = path[len(protocol_prefix) :]
-        return path
+        if self.path.startswith(self.root):
+            return self.path[len(self.root) :]
+        return self.path

     @cached_property
-    def parts(self) -> Tuple[str, ...]:
-        """
-
-        path = self.path_without_protocol
-        path = path.lstrip("/")
-        if path != "":
-            parts.extend(path.split("/"))
-        return tuple(parts)
+    def root(self) -> str:
+        """Return root of the path, like s3://"""
+        return f"{self._protocol_with_profile}://"

     @cached_property
     def _client(self):
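The refactor expresses path_with_protocol and path_without_protocol in terms of the new root property. An illustration for the default profile, where root is "s3://"; the expected values follow the docstrings above:

    from megfile.s3_path import S3Path

    p = S3Path("s3://bucket/key")
    print(p.root)                   # s3://            (per the root docstring)
    print(p.path_with_protocol)     # s3://bucket/key
    print(p.path_without_protocol)  # bucket/key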
@@ -1991,7 +2004,7 @@ class S3Path(URIPath):
             for content in resp.get("Contents", []):
                 if content["Key"].endswith("/"):
                     continue
-                path = s3_path_join(f"{protocol}://", bucket, content["Key"])
+                path = _s3_path_join(f"{protocol}://", bucket, content["Key"])

                 if followlinks:
                     try:
@@ -2238,9 +2251,7 @@ class S3Path(URIPath):
             dirs = sorted(dirs)
             stack.extend(reversed(dirs))

-            root = s3_path_join(
-                f"{self._protocol_with_profile}://", bucket, current
-            )[:-1]
+            root = _s3_path_join(self.root, bucket, current)[:-1]
             dirs = [path[len(current) :] for path in dirs]
             files = sorted(path[len(current) :] for path in files)
             if files or dirs or not current:
megfile/sftp2_path.py
CHANGED
@@ -14,6 +14,7 @@ from urllib.parse import urlsplit, urlunsplit
 import ssh2.session # type: ignore
 import ssh2.sftp # type: ignore
 from ssh2.exceptions import SFTPProtocolError # type: ignore
+from ssh2.sftp_handle import SFTPAttributes # type: ignore

 from megfile.config import SFTP_MAX_RETRY_TIMES
 from megfile.errors import SameFileError, _create_missing_ok_generator
@@ -32,12 +33,12 @@ __all__ = [
     "is_sftp2",
 ]

-
-
-
-
-
-
+SFTP_USERNAME = "SFTP_USERNAME"
+SFTP_PASSWORD = "SFTP_PASSWORD"
+SFTP_PRIVATE_KEY_PATH = "SFTP_PRIVATE_KEY_PATH"
+SFTP_PRIVATE_KEY_TYPE = "SFTP_PRIVATE_KEY_TYPE"
+SFTP_PRIVATE_KEY_PASSWORD = "SFTP_PRIVATE_KEY_PASSWORD"
+SFTP_MAX_UNAUTH_CONN = "SFTP_MAX_UNAUTH_CONN"
 MAX_RETRIES = SFTP_MAX_RETRY_TIMES
 DEFAULT_SSH_CONNECT_TIMEOUT = 5
 DEFAULT_SSH_KEEPALIVE_INTERVAL = 15
@@ -65,11 +66,11 @@ def _make_stat(stat) -> StatResult:

 def get_private_key():
     """Get private key for SSH authentication"""
-    private_key_path = os.getenv(
+    private_key_path = os.getenv(SFTP_PRIVATE_KEY_PATH)
     if private_key_path:
         if not os.path.exists(private_key_path):
             raise FileNotFoundError(f"Private key file not exist: '{private_key_path}'")
-        private_key_password = os.getenv(
+        private_key_password = os.getenv(SFTP_PRIVATE_KEY_PASSWORD)
         if private_key_password:
             return private_key_path, private_key_password
         return private_key_path, ""
@@ -86,12 +87,12 @@ def provide_connect_info(
     if not port:
         port = 22
     if not username:
-        username = os.getenv(
+        username = os.getenv(SFTP_USERNAME)
         if not username:
            # If no username is given, fall back to the current system user name
            username = getpass.getuser()
     if not password:
-        password = os.getenv(
+        password = os.getenv(SFTP_PASSWORD)
     private_key = get_private_key()
     return hostname, port, username, password, private_key

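The module-level constants above name the environment variables that provide_connect_info and get_private_key fall back to when no explicit credentials are given. A sketch of setting them; all values are placeholders:

    import os

    os.environ["SFTP_USERNAME"] = "alice"
    os.environ["SFTP_PASSWORD"] = "secret"
    os.environ["SFTP_PRIVATE_KEY_PATH"] = "/home/alice/.ssh/id_ed25519"
    os.environ["SFTP_PRIVATE_KEY_PASSWORD"] = "key-passphrase"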
@@ -411,10 +412,7 @@ class Sftp2Path(URIPath):
    """sftp2 protocol

    uri format:
-    - absolute path
-        - sftp2://[username[:password]@]hostname[:port]//file_path
-    - relative path
-        - sftp2://[username[:password]@]hostname[:port]/file_path
+    - sftp2://[username[:password]@]hostname[:port]/file_path
    """

    protocol = "sftp2"
@@ -423,27 +421,12 @@ class Sftp2Path(URIPath):
        super().__init__(path, *other_paths)
        parts = urlsplit(self.path)
        self._urlsplit_parts = parts
-        self.
-        if parts.path.startswith("//"):
-            self._root_dir = "/"
-        else:
-            self._root_dir = "/" # Default to absolute path for ssh2
-        self._real_path = (
-            parts.path.lstrip("/")
-            if not parts.path.startswith("//")
-            else parts.path[2:]
-        )
-        if not self._real_path.startswith("/"):
-            self._real_path = f"/{self._real_path}"
+        self._remote_path = parts.path or "/"

    @cached_property
    def parts(self) -> Tuple[str, ...]:
        """A tuple giving access to the path's various components"""
-
-            new_parts = self._urlsplit_parts._replace(path="//")
-        else:
-            new_parts = self._urlsplit_parts._replace(path="/")
-        parts = [urlunsplit(new_parts)]
+        parts = [urlunsplit(self._urlsplit_parts._replace(path=""))]
        path = self._urlsplit_parts.path.lstrip("/")
        if path != "":
            parts.extend(path.split("/"))
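With the constructor change above, the remote path is simply the path component of the split URL, defaulting to "/"; a standard-library illustration of what _remote_path now holds:

    from urllib.parse import urlsplit

    parts = urlsplit("sftp2://user@host:22/data/file.txt")
    remote_path = parts.path or "/"
    print(remote_path)  # /data/file.txt — what Sftp2Path._remote_path would hold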
@@ -511,12 +494,6 @@ class Sftp2Path(URIPath):
         )

     def _generate_path_object(self, sftp_local_path: str, resolve: bool = False):
-        if resolve or self._root_dir == "/":
-            sftp_local_path = f"//{sftp_local_path.lstrip('/')}"
-        else:
-            sftp_local_path = os.path.relpath(sftp_local_path, start=self._root_dir)
-            if sftp_local_path == ".":
-                sftp_local_path = "/"
         new_parts = self._urlsplit_parts._replace(path=sftp_local_path)
         return self.from_path(urlunsplit(new_parts))

@@ -580,12 +557,12 @@ class Sftp2Path(URIPath):
             return self.from_path(path).is_dir(followlinks=followlinks)

         fs = FSFunc(_exist, _is_dir, _scandir)
-        for
+        for remote_path in _create_missing_ok_generator(
             iglob(fspath(glob_path), recursive=recursive, fs=fs),
             missing_ok,
             FileNotFoundError(f"No match any file: {glob_path!r}"),
         ):
-            yield self.from_path(
+            yield self.from_path(remote_path)

     def is_dir(self, followlinks: bool = False) -> bool:
         """Test if a path is directory"""
@@ -639,7 +616,7 @@ class Sftp2Path(URIPath):
            for parent_path_object in parent_path_objects[::-1]:
                parent_path_object.mkdir(mode=mode, parents=False, exist_ok=True)
        try:
-            self._client.mkdir(self.
+            self._client.mkdir(self._remote_path, mode)
        except OSError:
            if not self.exists():
                raise
@@ -670,7 +647,7 @@ class Sftp2Path(URIPath):
        if self._is_same_backend(dst_path):
            if overwrite:
                dst_path.remove(missing_ok=True)
-                self._client.rename(self.
+                self._client.rename(self._remote_path, dst_path._remote_path)
            else:
                self.sync(dst_path, overwrite=overwrite)
                self.remove(missing_ok=True)
@@ -680,7 +657,7 @@ class Sftp2Path(URIPath):
                    self.from_path(file_entry.path).rename(
                        dst_path.joinpath(file_entry.name)
                    )
-                self._client.rmdir(self.
+                self._client.rmdir(self._remote_path)
        else:
            if overwrite or not dst_path.exists():
                with self.open("rb") as fsrc:
@@ -703,9 +680,9 @@ class Sftp2Path(URIPath):
        if self.is_dir():
            for file_entry in self.scandir():
                self.from_path(file_entry.path).remove(missing_ok=missing_ok)
-            self._client.rmdir(self.
+            self._client.rmdir(self._remote_path)
        else:
-            self._client.unlink(self.
+            self._client.unlink(self._remote_path)

     def scan(self, missing_ok: bool = True, followlinks: bool = False) -> Iterator[str]:
         """Iteratively traverse only files in given directory"""
@@ -752,7 +729,7 @@ class Sftp2Path(URIPath):

     def scandir(self) -> ContextIterator:
         """Get all content of given file path"""
-
+        remote_path = self._remote_path
         stat_result = None
         try:
             stat_result = self.stat(follow_symlinks=False)
@@ -760,13 +737,13 @@ class Sftp2Path(URIPath):
             raise NotADirectoryError(f"Not a directory: '{self.path_with_protocol}'")

         if stat_result.is_symlink():
-
+            remote_path = self.readlink()._remote_path
         elif not stat_result.is_dir():
             raise NotADirectoryError(f"Not a directory: '{self.path_with_protocol}'")

         def create_generator():
             # Use opendir and readdir from ssh2-python
-            dir_handle = self._client.opendir(
+            dir_handle = self._client.opendir(remote_path)
             try:
                 # ssh2-python's readdir returns a generator
                 # First call returns all entries, subsequent calls return empty
@@ -794,9 +771,9 @@ class Sftp2Path(URIPath):
         """Get StatResult of file on sftp2"""
         try:
             if follow_symlinks:
-                stat = self._client.stat(self.
+                stat = self._client.stat(self._remote_path)
             else:
-                stat = self._client.lstat(self.
+                stat = self._client.lstat(self._remote_path)
             return _make_stat(stat)
         except SFTPProtocolError as e: # pytype: disable=mro-error
             raise FileNotFoundError(
@@ -811,7 +788,7 @@ class Sftp2Path(URIPath):
         """Remove the file on sftp2"""
         if missing_ok and not self.exists():
             return
-        self._client.unlink(self.
+        self._client.unlink(self._remote_path)

     def walk(
         self, followlinks: bool = False
@@ -823,7 +800,7 @@ class Sftp2Path(URIPath):
         if self.is_file(followlinks=followlinks):
             return

-        stack = [self.
+        stack = [self._remote_path]
         while stack:
             root = stack.pop()
             dirs, files = [], []
@@ -848,8 +825,8 @@ class Sftp2Path(URIPath):

     def resolve(self, strict=False) -> "Sftp2Path":
         """Return the canonical path"""
-        path = self._client.realpath(self.
-        return self._generate_path_object(path
+        path = self._client.realpath(self._remote_path)
+        return self._generate_path_object(path)

     def md5(self, recalculate: bool = False, followlinks: bool = False):
         """Calculate the md5 value of the file"""
@@ -872,7 +849,7 @@ class Sftp2Path(URIPath):
         dst_path = self.from_path(dst_path)
         if dst_path.exists(followlinks=False):
             raise FileExistsError(f"File exists: '{dst_path.path_with_protocol}'")
-        return self._client.symlink(self.
+        return self._client.symlink(self._remote_path, dst_path._remote_path)

     def readlink(self) -> "Sftp2Path":
         """Return a Sftp2Path instance representing the path to which the
@@ -884,7 +861,7 @@ class Sftp2Path(URIPath):
         if not self.is_symlink():
             raise OSError(f"Not a symlink: {self.path_with_protocol!r}")
         try:
-            path = self._client.
+            path = self._client.realpath(self._remote_path)
             if not path:
                 raise OSError(f"Not a symlink: {self.path_with_protocol!r}")
             if not path.startswith("/"):
@@ -948,7 +925,7 @@ class Sftp2Path(URIPath):
                 | ssh2.sftp.LIBSSH2_FXF_APPEND
             )

-        sftp_handle = self._client.open(self.
+        sftp_handle = self._client.open(self._remote_path, ssh2_mode, 0o644)

         # Create raw file wrapper
         raw_file = Sftp2RawFile(sftp_handle, self.path, mode)
@@ -976,7 +953,9 @@ class Sftp2Path(URIPath):

     def chmod(self, mode: int, *, follow_symlinks: bool = True):
         """Change the file mode and permissions"""
-
+        stat = SFTPAttributes()
+        stat.permissions = int(mode)
+        return self._client.setstat(self._remote_path, stat)

     def absolute(self) -> "Sftp2Path":
         """Make the path absolute"""
@@ -986,7 +965,7 @@ class Sftp2Path(URIPath):
         """Remove this directory. The directory must be empty"""
         if len(self.listdir()) > 0:
             raise OSError(f"Directory not empty: '{self.path_with_protocol}'")
-        return self._client.rmdir(self.
+        return self._client.rmdir(self._remote_path)

     def copy(
         self,
@@ -1014,7 +993,7 @@ class Sftp2Path(URIPath):
         dst_path = self.from_path(dst_path)

         if self._is_same_backend(dst_path):
-            if self.
+            if self._remote_path == dst_path._remote_path:
                 raise SameFileError(
                     f"'{self.path}' and '{dst_path.path}' are the same file"
                 )
@@ -1022,8 +1001,8 @@ class Sftp2Path(URIPath):
             exec_result = self._exec_command(
                 [
                     "cp",
-                    self.
-                    dst_path.
+                    self._remote_path,
+                    dst_path._remote_path,
                 ]
             )

@@ -1081,4 +1060,7 @@ class Sftp2Path(URIPath):

     def utime(self, atime: Union[float, int], mtime: Union[float, int]) -> None:
         """Set the access and modified times of the file"""
-
+        stat = SFTPAttributes()
+        stat.atime = int(atime)
+        stat.mtime = int(mtime)
+        self._client.setstat(self._remote_path, stat)
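chmod and utime now share one mechanism: fill an ssh2 SFTPAttributes structure and pass it to the SFTP session's setstat, exactly as in the added lines above. A condensed sketch, where sftp_session and remote_path stand in for the class's _client and _remote_path:

    from ssh2.sftp_handle import SFTPAttributes # type: ignore

    def set_mode_and_times(sftp_session, remote_path: str) -> None:
        # chmod(0o644) equivalent, mirroring the chmod implementation above.
        attrs = SFTPAttributes()
        attrs.permissions = 0o644
        sftp_session.setstat(remote_path, attrs)

        # utime(atime, mtime) equivalent, mirroring the utime implementation above.
        attrs = SFTPAttributes()
        attrs.atime = 1_700_000_000
        attrs.mtime = 1_700_000_000
        sftp_session.setstat(remote_path, attrs)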