megfile 4.0.0.post1__py3-none-any.whl → 4.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- megfile/__init__.py +2 -0
- megfile/cli.py +55 -1
- megfile/config.py +68 -4
- megfile/lib/s3_prefetch_reader.py +21 -17
- megfile/sftp.py +77 -0
- megfile/sftp_path.py +1 -0
- megfile/smart.py +4 -3
- megfile/version.py +1 -1
- {megfile-4.0.0.post1.dist-info → megfile-4.0.1.dist-info}/METADATA +3 -3
- {megfile-4.0.0.post1.dist-info → megfile-4.0.1.dist-info}/RECORD +15 -15
- {megfile-4.0.0.post1.dist-info → megfile-4.0.1.dist-info}/LICENSE +0 -0
- {megfile-4.0.0.post1.dist-info → megfile-4.0.1.dist-info}/LICENSE.pyre +0 -0
- {megfile-4.0.0.post1.dist-info → megfile-4.0.1.dist-info}/WHEEL +0 -0
- {megfile-4.0.0.post1.dist-info → megfile-4.0.1.dist-info}/entry_points.txt +0 -0
- {megfile-4.0.0.post1.dist-info → megfile-4.0.1.dist-info}/top_level.txt +0 -0
megfile/__init__.py
CHANGED

@@ -121,6 +121,7 @@ from megfile.s3_path import S3Path
 from megfile.sftp import (
     is_sftp,
     sftp_absolute,
+    sftp_add_host_key,
     sftp_chmod,
     sftp_concat,
     sftp_copy,
@@ -371,6 +372,7 @@ __all__ = [
     "sftp_copy",
     "sftp_sync",
     "sftp_concat",
+    "sftp_add_host_key",
     "is_hdfs",
     "hdfs_exists",
     "hdfs_stat",
megfile/cli.py
CHANGED

@@ -10,10 +10,11 @@ from functools import partial
 import click
 from tqdm import tqdm

-from megfile.config import READER_BLOCK_SIZE
+from megfile.config import READER_BLOCK_SIZE, SFTP_HOST_KEY_POLICY
 from megfile.hdfs_path import DEFAULT_HDFS_TIMEOUT
 from megfile.interfaces import FileEntry
 from megfile.lib.glob import get_non_glob_dir, has_magic
+from megfile.sftp import sftp_add_host_key
 from megfile.smart import (
     _smart_sync_single_file,
     smart_copy,
@@ -110,6 +111,23 @@ def smart_list_stat(path):
     yield from smart_scandir(path)


+def _sftp_prompt_host_key(path):
+    if SFTP_HOST_KEY_POLICY == "auto":
+        return
+
+    path = SmartPath(path)
+    if path.protocol == "sftp":
+        hostname = (
+            path.pathlike._urlsplit_parts.hostname  # pytype: disable=attribute-error
+        )
+        port = path.pathlike._urlsplit_parts.port  # pytype: disable=attribute-error
+        sftp_add_host_key(
+            hostname=hostname,
+            port=port,
+            prompt=True,
+        )
+
+
 def _ls(path: str, long: bool, recursive: bool, human_readable: bool):
     base_path = path
     full_path = False
@@ -121,6 +139,9 @@ def _ls(path: str, long: bool, recursive: bool, human_readable: bool):
         scan_func = smart_scan_stat
     else:
         scan_func = smart_list_stat
+
+    _sftp_prompt_host_key(base_path)
+
     if long:
         if human_readable:
             echo_func = human_echo
@@ -209,6 +230,10 @@ def cp(
 ):
     if not no_target_directory and (dst_path.endswith("/") or smart_isdir(dst_path)):
         dst_path = smart_path_join(dst_path, os.path.basename(src_path))
+
+    _sftp_prompt_host_key(src_path)
+    _sftp_prompt_host_key(dst_path)
+
     if recursive:
         with ThreadPoolExecutor(max_workers=(os.cpu_count() or 1) * 2) as executor:
             if progress_bar:
@@ -274,6 +299,10 @@ def mv(
 ):
     if not no_target_directory and (dst_path.endswith("/") or smart_isdir(dst_path)):
         dst_path = smart_path_join(dst_path, os.path.basename(src_path))
+
+    _sftp_prompt_host_key(src_path)
+    _sftp_prompt_host_key(dst_path)
+
     if progress_bar:
         src_protocol, _ = SmartPath._extract_protocol(src_path)
         dst_protocol, _ = SmartPath._extract_protocol(dst_path)
@@ -324,6 +353,8 @@ def mv(
     "under the specified directory or prefix.",
 )
 def rm(path: str, recursive: bool):
+    _sftp_prompt_host_key(path)
+
     remove_func = smart_remove if recursive else smart_unlink
     remove_func(path)

@@ -349,6 +380,9 @@ def sync(
     quiet: bool,
     skip: bool,
 ):
+    _sftp_prompt_host_key(src_path)
+    _sftp_prompt_host_key(dst_path)
+
     if not smart_exists(dst_path):
         force = True

@@ -434,18 +468,24 @@ def sync(
 @cli.command(short_help="Make the path if it doesn't already exist.")
 @click.argument("path")
 def mkdir(path: str):
+    _sftp_prompt_host_key(path)
+
     smart_makedirs(path)


 @cli.command(short_help="Make the file if it doesn't already exist.")
 @click.argument("path")
 def touch(path: str):
+    _sftp_prompt_host_key(path)
+
     smart_touch(path)


 @cli.command(short_help="Concatenate any files and send them to stdout.")
 @click.argument("path")
 def cat(path: str):
+    _sftp_prompt_host_key(path)
+
     with smart_open(path, "rb") as f:
         shutil.copyfileobj(f, sys.stdout.buffer)  # pytype: disable=wrong-arg-types

@@ -458,6 +498,8 @@ def cat(path: str):
     "-n", "--lines", type=click.INT, default=10, help="print the first NUM lines"
 )
 def head(path: str, lines: int):
+    _sftp_prompt_host_key(path)
+
     with smart_open(path, "rb") as f:
         for _ in range(lines):
             try:
@@ -480,6 +522,8 @@ def head(path: str, lines: int):
     "-f", "--follow", is_flag=True, help="output appended data as the file grows"
 )
 def tail(path: str, lines: int, follow: bool):
+    _sftp_prompt_host_key(path)
+
     line_list = []
     with smart_open(path, "rb") as f:
         f.seek(0, os.SEEK_END)
@@ -524,6 +568,8 @@ def tail(path: str, lines: int, follow: bool):
 @click.option("-a", "--append", is_flag=True, help="Append to the given file")
 @click.option("-o", "--stdout", is_flag=True, help="File content to standard output")
 def to(path: str, append: bool, stdout: bool):
+    _sftp_prompt_host_key(path)
+
     mode = "wb"
     if append:
         mode = "ab"
@@ -545,24 +591,32 @@ def to(path: str, append: bool, stdout: bool):
 @cli.command(short_help="Produce an md5sum file for all the objects in the path.")
 @click.argument("path")
 def md5sum(path: str):
+    _sftp_prompt_host_key(path)
+
     click.echo(smart_getmd5(path, recalculate=True))


 @cli.command(short_help="Return the total size and number of objects in remote:path.")
 @click.argument("path")
 def size(path: str):
+    _sftp_prompt_host_key(path)
+
     click.echo(smart_getsize(path))


 @cli.command(short_help="Return the mtime and number of objects in remote:path.")
 @click.argument("path")
 def mtime(path: str):
+    _sftp_prompt_host_key(path)
+
     click.echo(smart_getmtime(path))


 @cli.command(short_help="Return the stat and number of objects in remote:path.")
 @click.argument("path")
 def stat(path: str):
+    _sftp_prompt_host_key(path)
+
     click.echo(smart_stat(path))

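Every path-taking CLI command now runs this host-key check before doing any work: it is skipped when the host-key policy is "auto", and otherwise, for sftp:// paths, the hostname and port are taken from the URL and passed to sftp_add_host_key with prompt=True. A rough standalone equivalent for scripted use, sketched with urllib.parse instead of megfile's internal URL parsing; the URL below is a placeholder:

from urllib.parse import urlsplit

from megfile import sftp_add_host_key

url = "sftp://user@sftp.example.com:2222/data"  # hypothetical sftp URL
parts = urlsplit(url)
if parts.scheme.startswith("sftp"):
    # prompt=False records the key without asking, useful in non-interactive jobs
    sftp_add_host_key(hostname=parts.hostname, port=parts.port or 22, prompt=False)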
megfile/config.py
CHANGED

@@ -1,26 +1,90 @@
 import os
+import typing as T
+
+
+def parse_quantity(quantity: T.Union[str, int]) -> int:
+    """
+    Parse kubernetes canonical form quantity like 200Mi to a int number.
+    Supported SI suffixes:
+    base1024: Ki | Mi | Gi | Ti | Pi | Ei
+    base1000: "" | k | M | G | T | P | E
+
+    (International System of units; See: http://physics.nist.gov/cuu/Units/binary.html)
+
+    Input:
+        quantity: string. kubernetes canonical form quantity
+
+    Returns:
+        Int
+
+    Raises:
+        ValueError on invalid or unknown input
+    """
+    if isinstance(quantity, int):
+        return quantity
+
+    exponents = {"K": 1, "k": 1, "M": 2, "G": 3, "T": 4, "P": 5, "E": 6}
+
+    number = quantity
+    suffix = None
+    if len(quantity) >= 2 and quantity[-1] == "i":
+        if quantity[-2] in exponents:
+            number = quantity[:-2]
+            suffix = quantity[-2:]
+    elif len(quantity) >= 1 and quantity[-1] in exponents:
+        number = quantity[:-1]
+        suffix = quantity[-1:]
+
+    try:
+        number = int(number)
+    except ValueError:
+        raise ValueError("Invalid number format: {}".format(number))
+
+    if suffix is None:
+        return number
+
+    if suffix.endswith("i"):
+        base = 1024
+    elif len(suffix) == 1:
+        base = 1000
+    else:
+        raise ValueError("{} has unknown suffix".format(quantity))
+
+    # handle SI inconsistency
+    if suffix == "ki":
+        raise ValueError("{} has unknown suffix".format(quantity))
+
+    if suffix[0] not in exponents:
+        raise ValueError("{} has unknown suffix".format(quantity))
+
+    exponent = int(exponents[suffix[0]])
+    return number * (base**exponent)  # pytype: disable=bad-return-type


 def to_boolean(value):
     return value.lower() in ("true", "yes", "1")


-READER_BLOCK_SIZE =
+READER_BLOCK_SIZE = parse_quantity(os.getenv("MEGFILE_READER_BLOCK_SIZE") or 8 * 2**20)
 if READER_BLOCK_SIZE <= 0:
     raise ValueError(
         f"'MEGFILE_READER_BLOCK_SIZE' must bigger than 0, got {READER_BLOCK_SIZE}"
     )
-READER_MAX_BUFFER_SIZE =
+READER_MAX_BUFFER_SIZE = parse_quantity(
+    os.getenv("MEGFILE_READER_MAX_BUFFER_SIZE") or 128 * 2**20
+)

 # Multi-upload in aws s3 has a maximum of 10,000 parts,
 # so the maximum supported file size is MEGFILE_WRITE_BLOCK_SIZE * 10,000,
 # the largest object that can be uploaded in a single PUT is 5 TB in aws s3.
-WRITER_BLOCK_SIZE =
+WRITER_BLOCK_SIZE = parse_quantity(os.getenv("MEGFILE_WRITER_BLOCK_SIZE") or 8 * 2**20)
 if WRITER_BLOCK_SIZE <= 0:
     raise ValueError(
         f"'MEGFILE_WRITER_BLOCK_SIZE' must bigger than 0, got {WRITER_BLOCK_SIZE}"
     )
-WRITER_MAX_BUFFER_SIZE =
+WRITER_MAX_BUFFER_SIZE = parse_quantity(
+    os.getenv("MEGFILE_WRITER_MAX_BUFFER_SIZE") or 128 * 2**20
+)
 DEFAULT_WRITER_BLOCK_AUTOSCALE = not os.getenv("MEGFILE_WRITER_BLOCK_SIZE")
 if os.getenv("MEGFILE_WRITER_BLOCK_AUTOSCALE"):
     DEFAULT_WRITER_BLOCK_AUTOSCALE = to_boolean(
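With parse_quantity in place, the block-size and buffer-size settings accept Kubernetes-style quantities from the environment as well as plain byte counts. A small illustration of the parsing rules; the "16Mi" value at the end is just an example setting:

from megfile.config import parse_quantity

assert parse_quantity(8 * 2**20) == 8388608      # ints pass through unchanged
assert parse_quantity("200Mi") == 200 * 1024**2  # base-1024 suffix
assert parse_quantity("200M") == 200 * 1000**2   # base-1000 suffix
assert parse_quantity("4096") == 4096            # bare number, no suffix

# e.g. export MEGFILE_READER_BLOCK_SIZE=16Mi before importing megfile
# yields READER_BLOCK_SIZE == 16 * 2**20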
megfile/lib/s3_prefetch_reader.py
CHANGED

@@ -52,7 +52,6 @@ class S3PrefetchReader(BasePrefetchReader):
         self._client = s3_client
         self._profile_name = profile_name
         self._content_etag = None
-        self._content_info = None

         super().__init__(
             block_size=block_size,
@@ -63,26 +62,31 @@ class S3PrefetchReader(BasePrefetchReader):
         )

     def _get_content_size(self):
-
-
-
-
-
-
-
+        if self._block_capacity > 0:
+            try:
+                start, end = 0, self._block_size - 1
+                first_index_response = self._fetch_response(start=start, end=end)
+                if "ContentRange" in first_index_response:
+                    content_size = int(
+                        first_index_response["ContentRange"].split("/")[-1]
+                    )
+                else:
+                    # usually when read a file only have one block
+                    content_size = int(first_index_response["ContentLength"])
+            except S3InvalidRangeError:
+                # usually when read a empty file
+                # can use minio test empty file: https://hub.docker.com/r/minio/minio
+                first_index_response = self._fetch_response()
                 content_size = int(first_index_response["ContentLength"])
-        except S3InvalidRangeError:
-            # usually when read a empty file
-            # can use minio test empty file: https://hub.docker.com/r/minio/minio
-            first_index_response = self._fetch_response()
-            content_size = int(first_index_response["ContentLength"])

-        if self._block_capacity > 0:
             first_future = Future()
             first_future.set_result(first_index_response["Body"])
             self._insert_futures(index=0, future=first_future)
-
-
+            self._content_etag = first_index_response["ETag"]
+        else:
+            response = self._client.head_object(Bucket=self._bucket, Key=self._key)
+            self._content_etag = response["ETag"]
+            content_size = int(response["ContentLength"])
         return content_size

     @property
@@ -121,7 +125,7 @@ class S3PrefetchReader(BasePrefetchReader):
         if etag is not None and etag != self._content_etag:
             raise S3FileChangedError(
                 "File changed: %r, etag before: %s, after: %s"
-                % (self.name, self.
+                % (self.name, self._content_etag, etag)
             )

         return response["Body"]
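The rewritten _get_content_size derives the total object size from the first ranged GET when prefetching is enabled, and falls back to a plain head_object call when the block capacity is zero; in both branches it records the ETag used later for the file-changed check. The ContentRange parsing relies on the standard S3/HTTP format "bytes <start>-<end>/<total>"; a minimal sketch of that parsing step, with an invented example header value:

# S3-style ContentRange header for a ranged GET of the first 8 MiB block (example value)
content_range = "bytes 0-8388607/1073741824"

# the total object size is the number after the slash
content_size = int(content_range.split("/")[-1])
assert content_size == 1073741824  # 1 GiB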
megfile/sftp.py
CHANGED

@@ -1,7 +1,11 @@
+import base64
+import hashlib
 import os
 from logging import getLogger as get_logger
 from typing import IO, BinaryIO, Callable, Iterator, List, Optional, Tuple

+import paramiko
+
 from megfile.interfaces import FileEntry, PathLike, StatResult
 from megfile.lib.compat import fspath
 from megfile.lib.joinpath import uri_join
@@ -52,6 +56,7 @@ __all__ = [
     "sftp_rmdir",
     "sftp_copy",
     "sftp_sync",
+    "sftp_add_host_key",
 ]


@@ -739,3 +744,75 @@ def sftp_sync(
     :param overwrite: whether or not overwrite file when exists, default is True
     """
     return SftpPath(src_path).sync(dst_path, followlinks, force, overwrite)
+
+
+def _check_input(input_str: str, fingerprint: str, times: int = 0) -> bool:
+    answers = input_str.strip()
+    if answers.lower() in ("yes", "y") or answers == fingerprint:
+        return True
+    elif answers.lower() in ("no", "n"):
+        return False
+    elif times >= 10:
+        _logger.warning("Retried more than 10 times, give up")
+        return False
+    else:
+        input_str = input("Please type 'yes', 'no' or the fingerprint: ")
+        return _check_input(input_str, fingerprint, times=times + 1)
+
+
+def _prompt_add_to_known_hosts(hostname, key) -> bool:
+    fingerprint = hashlib.sha256(key.asbytes()).digest()
+    fingerprint = f"SHA256:{base64.b64encode(fingerprint).decode('utf-8')}"
+    answers = input(f"""The authenticity of host '{hostname}' can't be established.
+{key.get_name().upper()} key fingerprint is {fingerprint}.
+This key is not known by any other names.
+Are you sure you want to continue connecting (yes/no/[fingerprint])? """)
+    return _check_input(answers, fingerprint)
+
+
+def sftp_add_host_key(
+    hostname: str,
+    port: int = 22,
+    prompt: bool = False,
+    host_key_path: Optional["str"] = None,
+):
+    """Add a host key to known_hosts.
+
+    :param hostname: hostname
+    :param port: port, default is 22
+    :param prompt: If True, requires user input of 'yes' or 'no' to decide whether to
+        add this host key
+    :param host_key_path: path of known_hosts, default is ~/.ssh/known_hosts
+    """
+    if not host_key_path:
+        host_key_path = os.path.expanduser("~/.ssh/known_hosts")
+
+    if not os.path.exists(host_key_path):
+        dirname = os.path.dirname(host_key_path)
+        if dirname and dirname != ".":
+            os.makedirs(dirname, exist_ok=True, mode=0o700)
+        with open(host_key_path, "w"):
+            pass
+        os.chmod(host_key_path, 0o600)
+
+    host_key = paramiko.hostkeys.HostKeys(host_key_path)
+    if host_key.lookup(hostname):
+        return
+
+    transport = paramiko.Transport(
+        (
+            hostname,
+            port,
+        )
+    )
+    transport.connect()
+    key = transport.get_remote_server_key()
+    transport.close()
+
+    if prompt:
+        result = _prompt_add_to_known_hosts(hostname, key)
+        if not result:
+            return
+
+    host_key.add(hostname, key.get_name(), key)
+    host_key.save(host_key_path)
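sftp_add_host_key fetches the server's public key over a throwaway paramiko transport, optionally shows an OpenSSH-style fingerprint prompt, and persists the key to known_hosts (creating the file with 0600 permissions if it does not exist). A hedged usage sketch; the host and the alternate known_hosts path are placeholders:

from megfile import sftp_add_host_key

# Trust a host non-interactively, e.g. in CI, writing to a job-local known_hosts file.
sftp_add_host_key(
    hostname="sftp.example.com",  # hypothetical host
    port=22,
    prompt=False,
    host_key_path="/tmp/ci_known_hosts",
)

# Interactive variant: prints the SHA256 fingerprint and asks yes/no/[fingerprint].
sftp_add_host_key(hostname="sftp.example.com", prompt=True)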
megfile/sftp_path.py
CHANGED

@@ -208,6 +208,7 @@ def _get_ssh_client(
     policy = policies.get(SFTP_HOST_KEY_POLICY, default_policy)()  # pyre-ignore[29]

     ssh_client = paramiko.SSHClient()
+    ssh_client.load_system_host_keys()
     ssh_client.set_missing_host_key_policy(policy)
     max_unauth_connections = int(os.getenv(SFTP_MAX_UNAUTH_CONN, 10))
     try:
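Loading the system host keys means entries already in ~/.ssh/known_hosts (including those written by sftp_add_host_key) are recognized before the missing-host-key policy is consulted, so a strict policy no longer rejects hosts the user has explicitly trusted. A minimal paramiko sketch of the same pattern, with a placeholder host and username:

import paramiko

client = paramiko.SSHClient()
client.load_system_host_keys()                               # trust ~/.ssh/known_hosts entries
client.set_missing_host_key_policy(paramiko.RejectPolicy())  # refuse hosts not in known_hosts
client.connect("sftp.example.com", port=22, username="user")  # hypothetical host
client.close()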
megfile/smart.py
CHANGED

@@ -1001,11 +1001,12 @@ def smart_load_content(
         return s3_load_content(path, start, stop)

     with smart_open(path, "rb") as fd:
-        if start:
+        if start is not None:
             fd.seek(start)
         offset = -1
-        if
-            offset = stop - start
+        if stop is not None:
+            offset = stop - (start or 0)  # start may be None
+            assert offset >= 0, "stop should be greater than start"
         return fd.read(offset)  # pytype: disable=bad-return-type

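The guards change from truthiness to explicit None checks, so start=0 now seeks correctly, stop is handled when start is omitted, and the resulting offset is validated. An illustration of the corrected semantics; the local file path is a placeholder:

from megfile import smart_load_content

with open("/tmp/example.bin", "wb") as f:  # hypothetical local file
    f.write(b"0123456789")

assert smart_load_content("/tmp/example.bin", start=0, stop=4) == b"0123"  # start=0 is honored
assert smart_load_content("/tmp/example.bin", start=6) == b"6789"          # read to end of file
assert smart_load_content("/tmp/example.bin", stop=3) == b"012"            # start defaults to 0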
megfile/version.py
CHANGED

@@ -1 +1 @@
-VERSION = "4.0.0.post1"
+VERSION = "4.0.1"
{megfile-4.0.0.post1.dist-info → megfile-4.0.1.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: megfile
-Version: 4.0.0.post1
+Version: 4.0.1
 Summary: Megvii file operation library
 Author-email: megvii <megfile@megvii.com>
 Project-URL: Homepage, https://github.com/megvii-research/megfile
@@ -200,10 +200,10 @@ s3 =
 $ megfile config s3 accesskey secretkey \
     --addressing-style virtual \
     --endpoint-url https://tos-s3-cn-beijing.ivolces.com \
-    --profile tos
+    --profile-name tos

 # create alias
-$ megfile alias tos s3+tos
+$ megfile config alias tos s3+tos
 ```

 You can get the configuration from `~/.config/megfile/aliases.conf`, like:
{megfile-4.0.0.post1.dist-info → megfile-4.0.1.dist-info}/RECORD
CHANGED

@@ -1,6 +1,6 @@
-megfile/__init__.py,sha256=
-megfile/cli.py,sha256=
-megfile/config.py,sha256=
+megfile/__init__.py,sha256=7oEfu410CFKzDWZ9RjL5xEJ1gtkJkTfvPrL_7TWdJuY,7366
+megfile/cli.py,sha256=FuwxjU7-Z_pIU8sX8Jf-0QNSLCR8JqLD4fqXyj7TrQI,24704
+megfile/config.py,sha256=4aXs2fw59ep1a6PfMT3Vq-AaOxTwaS8yfgMNcb3Cvw8,3652
 megfile/errors.py,sha256=a55qKQgyfiLmV-qnojUFzq2gu9JXpj3ZiC2qVaWyUTA,14160
 megfile/fs.py,sha256=bPGbapv41FzME87X3MhSNQRjNmHrI23FuFnjPT0ukQs,18239
 megfile/fs_path.py,sha256=ZK-po1xqhHocMb9Vrxf5K9tDx3zxQmGxNIHY3Z7Akp8,39085
@@ -12,13 +12,13 @@ megfile/interfaces.py,sha256=p4UvVZpeLx5djd6bqqDaygIx_s-_AxIVj-gudTch4JE,8467
 megfile/pathlike.py,sha256=vfuTBqSTIciRxkkqMfLfnBxWTEl9yns1yR8zgK4Raw0,31268
 megfile/s3.py,sha256=zqAegH5tijcztEKcfHXmOYhAR880nTxaAzc2O0JJnjc,16661
 megfile/s3_path.py,sha256=oBA9GdOseEtQJmh7LMDOf1sGamsEERs6Sm1jHpdksO8,93343
-megfile/sftp.py,sha256=
-megfile/sftp_path.py,sha256=
-megfile/smart.py,sha256=
+megfile/sftp.py,sha256=0ZnQlmhgvs7pYjFTcvvOyxTo2IUurE-hp1GN0hnIrdQ,26473
+megfile/sftp_path.py,sha256=4rcbn3wqcOEs71W6qWu1efcj6MZUgrZm6U0Jan-eB70,43604
+megfile/smart.py,sha256=h42U8WQvCy9UXOX0X3V1eOZ500wDeZy8YKfr-uygTaA,36881
 megfile/smart_path.py,sha256=Bqg95T2-XZrRXWhH7GT-jMCYzD7i1SIXdczQxtOxiPs,7583
 megfile/stdio.py,sha256=C_cGID_npthpwoPcsJMMEqqbVUPUnDxxJV9jLY2_D7c,635
 megfile/stdio_path.py,sha256=L8ODNIwO79UIv13YYc2OTr6f4XTv4ZPyvBeRk83-AjA,2700
-megfile/version.py,sha256=
+megfile/version.py,sha256=AvvDxCXX9bKN1IZtn0pXhbfwx56BkY-VhwBBiNl-1JE,19
 megfile/lib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 megfile/lib/base_prefetch_reader.py,sha256=6Dy2ZwlowqAvyUUa7bpQLCKOclmmUDhqEF-_CDDp0Og,13100
 megfile/lib/combine_reader.py,sha256=nKGAug29lOpNIZuLKu7_qVrJJRpXL_J4jxLglWbGJ1w,4808
@@ -36,17 +36,17 @@ megfile/lib/s3_cached_handler.py,sha256=X8PdeRC-BY6eSmOO5f2BeyjTPxyEwNtHgmAm9Vgm
 megfile/lib/s3_limited_seekable_writer.py,sha256=mUeoTS98LHluwDN7zxdCVcsjOGBT1bOYV8nRvi9QMGE,6212
 megfile/lib/s3_memory_handler.py,sha256=4uzBzz2jfRI_u6jl0CpOGAhpNJhDQo18FSAweauCUFs,4136
 megfile/lib/s3_pipe_handler.py,sha256=dm7NnZd1Ym5ABS1GvOQtoCJEO_CB8e6p4sUhLiid0go,3622
-megfile/lib/s3_prefetch_reader.py,sha256=
+megfile/lib/s3_prefetch_reader.py,sha256=ujqQMXAxsaNtWA_VdqfwqcEKBJFFv0GauAaj2WGLTCs,4552
 megfile/lib/s3_share_cache_reader.py,sha256=LVWKxHdHo0_zUIW4o8yqNvplqqwezUPeYEt02Vj-WNM,3754
 megfile/lib/shadow_handler.py,sha256=TntewlvIW9ZxCfmqASDQREHoiZ8v42faOe9sovQYQz0,2779
 megfile/lib/stdio_handler.py,sha256=IDdgENLQlhigEwkLL4zStueVSzdWg7xVcTF_koof_Ek,1987
 megfile/lib/url.py,sha256=ER32pWy9Q2MAk3TraAaNEBWIqUeBmLuM57ol2cs7-Ks,103
 megfile/utils/__init__.py,sha256=sATf_NlsSTYIMEiA8-gM6K1M-Q1K6_7rx2VM31hrqaA,10838
 megfile/utils/mutex.py,sha256=asb8opGLgK22RiuBJUnfsvB8LnMmodP8KzCVHKmQBWA,2561
-megfile-4.0.
-megfile-4.0.
-megfile-4.0.
-megfile-4.0.
-megfile-4.0.
-megfile-4.0.
-megfile-4.0.
+megfile-4.0.1.dist-info/LICENSE,sha256=WNHhf_5RCaeuKWyq_K39vmp9F28LxKsB4SpomwSZ2L0,11357
+megfile-4.0.1.dist-info/LICENSE.pyre,sha256=9lf5nT-5ZH25JijpYAequ0bl8E8z5JmZB1qrjiUMp84,1080
+megfile-4.0.1.dist-info/METADATA,sha256=GVa381PxFA9AaKcRRWpUdxvOwBxBSbPhZud6_HP_1eE,9240
+megfile-4.0.1.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
+megfile-4.0.1.dist-info/entry_points.txt,sha256=M6ZWSSv5_5_QtIpZafy3vq7WuOJ_5dSGQQnEZbByt2Q,49
+megfile-4.0.1.dist-info/top_level.txt,sha256=IaHHoRXeemLL6kTM5YuC3H0UyOnTdZH9J324TkeBneo,36
+megfile-4.0.1.dist-info/RECORD,,
The remaining dist-info files (LICENSE, LICENSE.pyre, WHEEL, entry_points.txt, top_level.txt) are unchanged apart from the directory rename.