megfile 4.1.0.post3__py3-none-any.whl → 4.1.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- megfile/cli.py +68 -36
- megfile/errors.py +31 -19
- megfile/fs.py +28 -3
- megfile/smart.py +26 -38
- megfile/smart_path.py +1 -1
- megfile/version.py +1 -1
- {megfile-4.1.0.post3.dist-info → megfile-4.1.2.dist-info}/METADATA +3 -2
- {megfile-4.1.0.post3.dist-info → megfile-4.1.2.dist-info}/RECORD +13 -13
- {megfile-4.1.0.post3.dist-info → megfile-4.1.2.dist-info}/WHEEL +1 -1
- {megfile-4.1.0.post3.dist-info → megfile-4.1.2.dist-info}/top_level.txt +0 -1
- {megfile-4.1.0.post3.dist-info → megfile-4.1.2.dist-info}/entry_points.txt +0 -0
- {megfile-4.1.0.post3.dist-info → megfile-4.1.2.dist-info/licenses}/LICENSE +0 -0
- {megfile-4.1.0.post3.dist-info → megfile-4.1.2.dist-info/licenses}/LICENSE.pyre +0 -0
megfile/cli.py
CHANGED
```diff
@@ -1,10 +1,12 @@
 import configparser
 import os
 import shutil
+import signal
 import sys
 import time
 from concurrent.futures import ThreadPoolExecutor
 from functools import partial
+from queue import Queue

 import click
 from tqdm import tqdm
@@ -69,10 +71,13 @@ def cli(debug, log_level):


 def safe_cli():  # pragma: no cover
+    debug = options.get("debug", False)
+    if not debug:
+        signal.signal(signal.SIGINT, signal.SIG_DFL)
     try:
         cli()
     except Exception as e:
-        if
+        if debug:
             raise
         else:
             click.echo(f"\n[{type(e).__name__}] {e}", err=True)
@@ -364,23 +369,25 @@ def rm(path: str, recursive: bool):
 @cli.command(short_help="Make source and dest identical, modifying destination only.")
 @click.argument("src_path")
 @click.argument("dst_path")
-@click.option("-g", "--progress-bar", is_flag=True, help="Show progress bar.")
 @click.option(
-    "-
+    "-f", "--force", is_flag=True, help="Copy files forcible, ignore same files."
 )
+@click.option("--skip", is_flag=True, help="Skip existed files.")
 @click.option(
-    "-
+    "-w", "--worker", type=click.INT, default=-1, help="Number of concurrent workers."
 )
+@click.option("-g", "--progress-bar", is_flag=True, help="Show progress bar.")
+@click.option("-v", "--verbose", is_flag=True, help="Show more progress log.")
 @click.option("-q", "--quiet", is_flag=True, help="Not show any progress log.")
-@click.option("--skip", is_flag=True, help="Skip existed files.")
 def sync(
     src_path: str,
     dst_path: str,
-    progress_bar: bool,
-    worker: int,
     force: bool,
-    quiet: bool,
     skip: bool,
+    worker: int,
+    progress_bar: bool,
+    verbose: bool,
+    quiet: bool,
 ):
     _sftp_prompt_host_key(src_path)
     _sftp_prompt_host_key(dst_path)
@@ -388,7 +395,8 @@ def sync(
     if not smart_exists(dst_path):
         force = True

-
+    max_workers = worker if worker > 0 else (os.cpu_count() or 1) * 2
+    with ThreadPoolExecutor(max_workers=max_workers + 1) as executor:  # +1 for scan
         if has_magic(src_path):
             src_root_path = get_non_glob_dir(src_path)
             if not smart_exists(src_root_path):
@@ -410,42 +418,66 @@ def sync(
             src_root_path = src_path
         scan_func = partial(smart_scan_stat, followlinks=True)

-        if progress_bar and not quiet:
-            print("building progress bar", end="\r")
-            file_entries = []
-            total_count = total_size = 0
-            for total_count, file_entry in enumerate(scan_func(src_path), start=1):
-                if total_count > max_file_object_catch_count:
-                    file_entries = []
-                else:
-                    file_entries.append(file_entry)
-                total_size += file_entry.stat.size
-                print(f"building progress bar, find {total_count} files", end="\r")
-
-            if not file_entries:
-                file_entries = scan_func(src_path)
-        else:
-            total_count = total_size = None
-            file_entries = scan_func(src_path)
-
         if quiet:
+            progress_bar = False
+            verbose = False
+
+        if not progress_bar:
             callback = callback_after_copy_file = None
+
+            if verbose:
+
+                def callback_after_copy_file(src_file_path, dst_file_path):
+                    print(f"copy {src_file_path} to {dst_file_path} done")
+
+            file_entries = scan_func(src_path)
         else:
-            tbar = tqdm(
+            tbar = tqdm(
+                total=0,
+                ascii=True,
+                desc="Files (scaning)",
+            )
             sbar = tqdm(
-
+                total=0,
                 ascii=True,
+                unit="B",
                 unit_scale=True,
                 unit_divisor=1024,
-
+                desc="File size (scaning)",
             )

-            def callback(_filename: str, length: int):
-                sbar.update(length)
-
             def callback_after_copy_file(src_file_path, dst_file_path):
+                if verbose:
+                    tqdm.write(f"copy {src_file_path} to {dst_file_path} done")
                 tbar.update(1)

+            def callback(src_file_path: str, length: int):
+                sbar.update(length)
+
+            file_entry_queue = Queue(maxsize=max_file_object_catch_count)
+
+            def scan_and_put_file_entry_to_queue():
+                for file_entry in scan_func(src_path):
+                    tbar.total += 1
+                    sbar.total += file_entry.stat.size
+                    tbar.refresh()
+                    sbar.refresh()
+                    file_entry_queue.put(file_entry)
+                file_entry_queue.put(None)
+                tbar.set_description_str("Files")
+                sbar.set_description_str("File size")
+
+            executor.submit(scan_and_put_file_entry_to_queue)
+
+            def get_file_entry_from_queue():
+                while True:
+                    file_entry = file_entry_queue.get()
+                    if file_entry is None:
+                        break
+                    yield file_entry
+
+            file_entries = get_file_entry_from_queue()
+
         params_iter = (
             dict(
                 src_root_path=src_root_path,
@@ -460,10 +492,10 @@ def sync(
             for file_entry in file_entries
         )
         list(executor.map(_smart_sync_single_file, params_iter))
-
+
+    if progress_bar:
+        sbar.update(sbar.total - sbar.n)
     tbar.close()
-    if progress_bar:
-        sbar.update(sbar.total - sbar.n)
     sbar.close()


```
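The notable change in `sync` is that it no longer pre-scans the whole source tree before copying: a scanner task submitted to the same `ThreadPoolExecutor` feeds a bounded `Queue` (growing the tqdm totals as it goes), while `executor.map` drains a generator reading from that queue. Below is a minimal standalone sketch of the same producer-consumer shape, not megfile's code; `sync_tree`, `copy_one`, and the 1024-entry bound are illustrative choices.

```python
import os
import shutil
from concurrent.futures import ThreadPoolExecutor
from queue import Queue

from tqdm import tqdm


def sync_tree(src_root: str, dst_root: str, workers: int = 4) -> None:
    # Bounded queue: the scanner cannot race arbitrarily far ahead of the copiers.
    queue = Queue(maxsize=1024)
    pbar = tqdm(total=0, ascii=True, desc="Files (scanning)")

    def scan() -> None:
        # Producer: walk the tree, grow the bar's total as files are discovered,
        # and hand paths to the consumers through the queue.
        for dirpath, _dirnames, filenames in os.walk(src_root):
            for name in filenames:
                pbar.total += 1
                pbar.refresh()
                queue.put(os.path.join(dirpath, name))
        queue.put(None)  # sentinel: scanning finished
        pbar.set_description_str("Files")

    def from_queue():
        # Generator consumed by executor.map below; stops at the sentinel.
        while True:
            item = queue.get()
            if item is None:
                return
            yield item

    def copy_one(src: str) -> None:
        rel = os.path.relpath(src, src_root)
        dst = os.path.join(dst_root, rel)
        os.makedirs(os.path.dirname(dst), exist_ok=True)
        shutil.copy2(src, dst)
        pbar.update(1)

    # One extra worker slot so the scanner and the copy workers run concurrently.
    with ThreadPoolExecutor(max_workers=workers + 1) as executor:
        executor.submit(scan)
        list(executor.map(copy_one, from_queue()))
    pbar.close()
```

Copying starts as soon as the first path is queued, so large trees show progress immediately instead of waiting for a full scan to finish.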
megfile/errors.py
CHANGED
```diff
@@ -110,29 +110,41 @@ if hasattr(botocore.exceptions, "ResponseStreamingError"):  # backport botocore=
     )
 s3_retry_exceptions = tuple(s3_retry_exceptions)  # pyre-ignore[9]

+s3_retry_error_codes = (
+    "429",  # noqa: E501 # TOS ExceedAccountQPSLimit
+    "499",  # noqa: E501 # Some cloud providers may send response with http code 499 if the connection not send data in 1 min.
+    "500",
+    "501",
+    "502",
+    "503",
+    "InternalError",
+    "ServiceUnavailable",
+    "SlowDown",
+    "ContextCanceled",
+    "Timeout",  # noqa: E501 # TOS Timeout
+    "RequestTimeout",
+    "RequestTimeTooSkewed",
+    "ExceedAccountQPSLimit",
+    "ExceedAccountRateLimit",
+    "ExceedBucketQPSLimit",
+    "ExceedBucketRateLimit",
+    "DownloadTrafficRateLimitExceeded",  # noqa: E501 # OSS RateLimitExceeded
+    "UploadTrafficRateLimitExceeded",
+    "MetaOperationQpsLimitExceeded",
+    "TotalQpsLimitExceeded",
+    "ActiveRequestLimitExceeded",
+    "CpuLimitExceeded",
+    "QpsLimitExceeded",
+)
+

 def s3_should_retry(error: Exception) -> bool:
     if isinstance(error, s3_retry_exceptions):  # pyre-ignore[6]
         return True
+    if isinstance(error, botocore.exceptions.SSLError):
+        return "EOF" in str(error)
     if isinstance(error, botocore.exceptions.ClientError):
-        return client_error_code(error) in
-            "429",  # noqa: E501 # TOS ExceedAccountQPSLimit
-            "499",  # noqa: E501 # Some cloud providers may send response with http code 499 if the connection not send data in 1 min.
-            "500",
-            "501",
-            "502",
-            "503",
-            "InternalError",
-            "ServiceUnavailable",
-            "SlowDown",
-            "ContextCanceled",
-            "Timeout",  # noqa: E501 # TOS Timeout
-            "RequestTimeout",
-            "ExceedAccountQPSLimit",
-            "ExceedAccountRateLimit",
-            "ExceedBucketQPSLimit",
-            "ExceedBucketRateLimit",
-        )
+        return client_error_code(error) in s3_retry_error_codes
     return False


@@ -424,7 +436,7 @@ def raise_s3_error(s3_url: PathLike, suppress_error_callback=None):


 def s3_error_code_should_retry(error: str) -> bool:
-    if error in
+    if error in s3_retry_error_codes:
         return True
     return False

```
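The retryable error codes previously hard-coded inside `s3_should_retry` are now a module-level `s3_retry_error_codes` tuple shared with `s3_error_code_should_retry`, several throttling codes (OSS/TOS rate limits, `RequestTimeTooSkewed`) are added, and SSL errors whose message contains "EOF" are retried. A rough sketch of how such a predicate is exercised with botocore; `RETRY_CODES` and `should_retry` are illustrative names, and `error.response["Error"]["Code"]` is the standard botocore field that a helper like `client_error_code` reads.

```python
import botocore.exceptions

RETRY_CODES = ("SlowDown", "ServiceUnavailable", "RequestTimeout", "503")


def should_retry(error: Exception) -> bool:
    if isinstance(error, botocore.exceptions.ClientError):
        # Error code as reported by the service in the error response body.
        code = error.response.get("Error", {}).get("Code", "Unknown")
        return code in RETRY_CODES
    return False


# Build a ClientError by hand just to exercise the predicate.
error = botocore.exceptions.ClientError(
    {"Error": {"Code": "SlowDown", "Message": "Reduce your request rate."}},
    "PutObject",
)
print(should_retry(error))  # True
```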
megfile/fs.py
CHANGED
```diff
@@ -1,4 +1,6 @@
 import os
+from stat import S_ISDIR as stat_isdir
+from stat import S_ISLNK as stat_islnk
 from typing import BinaryIO, Callable, Iterator, List, Optional, Tuple

 from megfile.fs_path import (
@@ -7,7 +9,7 @@ from megfile.fs_path import (
     fs_path_join,
     is_fs,
 )
-from megfile.interfaces import Access, FileEntry, PathLike, StatResult
+from megfile.interfaces import Access, ContextIterator, FileEntry, PathLike, StatResult

 __all__ = [
     "is_fs",
@@ -169,7 +171,7 @@ def fs_isfile(path: PathLike, followlinks: bool = False) -> bool:
     return FSPath(path).is_file(followlinks)


-def fs_listdir(path: PathLike) -> List[str]:
+def fs_listdir(path: Optional[PathLike] = None) -> List[str]:
     """
     Get all contents of given fs path.
     The result is in ascending alphabetical order.
@@ -177,6 +179,8 @@ def fs_listdir(path: PathLike) -> List[str]:
     :param path: Given path
     :returns: All contents have in the path in ascending alphabetical order
     """
+    if path is None:
+        return sorted(os.listdir(path))
     return FSPath(path).listdir()


@@ -255,13 +259,34 @@ def fs_scan_stat(
     return FSPath(path).scan_stat(missing_ok, followlinks)


-def fs_scandir(path: PathLike) -> Iterator[FileEntry]:
+def fs_scandir(path: Optional[PathLike] = None) -> Iterator[FileEntry]:
     """
     Get all content of given file path.

     :param path: Given path
     :returns: An iterator contains all contents have prefix path
     """
+    if path is None:
+
+        def create_generator():
+            with os.scandir(None) as entries:
+                for entry in entries:
+                    stat = entry.stat()
+                    yield FileEntry(
+                        entry.name,
+                        entry.path,
+                        StatResult(
+                            size=stat.st_size,
+                            ctime=stat.st_ctime,
+                            mtime=stat.st_mtime,
+                            isdir=stat_isdir(stat.st_mode),
+                            islnk=stat_islnk(stat.st_mode),
+                            extra=stat,
+                        ),
+                    )
+
+        return ContextIterator(create_generator())
+
     return FSPath(path).scandir()


```
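With the new `Optional` signatures, both helpers fall back to the current working directory when called without a path, which is what `smart.py` now delegates to (see the next diff). A usage sketch under that assumption; it also assumes the returned `ContextIterator` can be iterated directly, as it is elsewhere in megfile.

```python
from megfile.fs import fs_listdir, fs_scandir

# No argument: operate on the current working directory.
print(fs_listdir())  # sorted names, equivalent to sorted(os.listdir())

for entry in fs_scandir():  # FileEntry objects with a pre-filled StatResult
    print(entry.name, entry.stat.size, entry.stat.isdir)
```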
megfile/smart.py
CHANGED
```diff
@@ -1,8 +1,6 @@
 import os
 from collections import defaultdict
 from functools import partial
-from stat import S_ISDIR as stat_isdir
-from stat import S_ISLNK as stat_islnk
 from typing import (
     IO,
     Any,
@@ -18,10 +16,14 @@ from typing import (
 from tqdm import tqdm

 from megfile.errors import S3UnknownError
-from megfile.fs import
+from megfile.fs import (
+    fs_copy,
+    fs_listdir,
+    fs_scandir,
+    is_fs,
+)
 from megfile.interfaces import (
     Access,
-    ContextIterator,
     FileCacher,
     FileEntry,
     NullCacher,
@@ -170,7 +172,7 @@ def smart_listdir(path: Optional[PathLike] = None) -> List[str]:
     :raises: FileNotFoundError, NotADirectoryError
     """
     if path is None:
-        return
+        return fs_listdir()
     return SmartPath(path).listdir()


@@ -183,25 +185,7 @@ def smart_scandir(path: Optional[PathLike] = None) -> Iterator[FileEntry]:
     :raises: FileNotFoundError, NotADirectoryError
     """
     if path is None:
-
-        def create_generator():
-            with os.scandir(None) as entries:
-                for entry in entries:
-                    stat = entry.stat()
-                    yield FileEntry(
-                        entry.name,
-                        entry.path,
-                        StatResult(
-                            size=stat.st_size,
-                            ctime=stat.st_ctime,
-                            mtime=stat.st_mtime,
-                            isdir=stat_isdir(stat.st_mode),
-                            islnk=stat_islnk(stat.st_mode),
-                            extra=stat,
-                        ),
-                    )
-
-        return ContextIterator(create_generator())
+        return fs_scandir()
     return SmartPath(path).scandir()


@@ -404,7 +388,7 @@ def _smart_sync_single_file(items: dict):
     force = items["force"]
     overwrite = items["overwrite"]

-    content_path =
+    content_path = smart_relpath(src_file_path, start=src_root_path)
     if len(content_path) and content_path != ".":
         content_path = content_path.lstrip("/")
     dst_abs_file_path = smart_path_join(dst_root_path, content_path)
@@ -438,6 +422,8 @@ def _smart_sync_single_file(items: dict):
             callback=copy_callback,
             followlinks=followlinks,
         )
+    elif callback:
+        callback(src_file_path, src_file_stat.size)
     if callback_after_copy_file:
         callback_after_copy_file(src_file_path, dst_abs_file_path)
     return should_sync
@@ -580,19 +566,21 @@ def smart_sync_with_progress(
     def callback_after_copy_file(src_file_path, dst_file_path):
         tbar.update(1)

-
-
-
-
-
-
-
-
-
-
-
-
-
+    try:
+        smart_sync(
+            src_path,
+            dst_path,
+            callback=tqdm_callback,
+            followlinks=followlinks,
+            callback_after_copy_file=callback_after_copy_file,
+            src_file_stats=file_stats,
+            map_func=map_func,
+            force=force,
+            overwrite=overwrite,
+        )
+    finally:
+        tbar.close()
+        sbar.close()


 def smart_remove(path: PathLike, missing_ok: bool = False) -> None:
```
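Besides delegating the `path is None` cases to `fs_listdir()`/`fs_scandir()`, two behavioural fixes land here: `_smart_sync_single_file` now reports skipped (already identical) files to the byte callback so progress bars still reach 100%, and `smart_sync_with_progress` closes its bars in a `finally` block. A small sketch of that cleanup pattern, with illustrative names (`run_with_progress`, `fake_work`):

```python
from tqdm import tqdm


def run_with_progress(work, total_files: int) -> None:
    tbar = tqdm(total=total_files, ascii=True, desc="Files")
    sbar = tqdm(total=0, ascii=True, unit="B", unit_scale=True, unit_divisor=1024, desc="File size")
    try:
        # Even if `work` raises, the bars are closed and the terminal stays clean.
        work(on_bytes=sbar.update, on_file=lambda: tbar.update(1))
    finally:
        tbar.close()
        sbar.close()


def fake_work(on_bytes, on_file):
    for size in (1024, 2048, 512):
        on_bytes(size)
        on_file()


run_with_progress(fake_work, total_files=3)
```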
megfile/smart_path.py
CHANGED
megfile/version.py
CHANGED
```diff
@@ -1 +1 @@
-VERSION = "4.1.
+VERSION = "4.1.2"
```
{megfile-4.1.0.post3.dist-info → megfile-4.1.2.dist-info}/METADATA
CHANGED

```diff
@@ -1,6 +1,6 @@
-Metadata-Version: 2.
+Metadata-Version: 2.4
 Name: megfile
-Version: 4.1.
+Version: 4.1.2
 Summary: Megvii file operation library
 Author-email: megvii <megfile@megvii.com>
 Project-URL: Homepage, https://github.com/megvii-research/megfile
@@ -31,6 +31,7 @@ Provides-Extra: hdfs
 Requires-Dist: hdfs; extra == "hdfs"
 Provides-Extra: cli
 Requires-Dist: click; extra == "cli"
+Dynamic: license-file

 megfile - Megvii FILE library
 ---
```
{megfile-4.1.0.post3.dist-info → megfile-4.1.2.dist-info}/RECORD
CHANGED

```diff
@@ -1,8 +1,8 @@
 megfile/__init__.py,sha256=7oEfu410CFKzDWZ9RjL5xEJ1gtkJkTfvPrL_7TWdJuY,7366
-megfile/cli.py,sha256=
+megfile/cli.py,sha256=X9nWGke-VM-FCFdRoEoxVH_SNFkKQesBxhSrRv7jAsI,25884
 megfile/config.py,sha256=2MMj5QkhlDJQFZRbCQL2c9iDdeMAVctiaPszRBkg5vM,3988
-megfile/errors.py,sha256
-megfile/fs.py,sha256=
+megfile/errors.py,sha256=-HuHCMj6AUPlXZnTWlUScZPoiw_DS6xx0NKsSuhztxw,14646
+megfile/fs.py,sha256=KMEqAE35alpcxiy6du5nPFYcaorhUM_kPJMah3q76ng,19160
 megfile/fs_path.py,sha256=Hozl9LAJ8EMuSWBSZXGj2GNmPZ1sJp9PZs-7hPrLgm8,39341
 megfile/hdfs.py,sha256=owXr4d3j1frCvlbhmhENcSBnKKDky5cJZzWLOF4ZJMo,13251
 megfile/hdfs_path.py,sha256=OmUe3vA3Qoxnqtcq0Rs3ygBvzAtqUz3fGo8iP5sWneE,26058
@@ -14,11 +14,11 @@ megfile/s3.py,sha256=abBxnI7RIyn7n7qjGszP1VruYd6Gi9I8QnUOvsHkx1Y,16325
 megfile/s3_path.py,sha256=zelXhlRVOVSWBE6HJz0vXrrcRzSuj6Cnjd9HLGwPbCM,93644
 megfile/sftp.py,sha256=uBcLQs-j6Q-q-sWAdd-pgi5Qmb_kq7boJM-0sCfcNO0,26540
 megfile/sftp_path.py,sha256=Wz4VcQ0pBUuWDGMSxPpPbutrT09mnY6jZNiAqTi5tO4,43840
-megfile/smart.py,sha256=
-megfile/smart_path.py,sha256=
+megfile/smart.py,sha256=Sae2KJzaU0k_qV_Bk0YifOMq8WsV5qQ2pGInDRF546I,36411
+megfile/smart_path.py,sha256=HqCOlDwekqqIyJAll-U9YKmaXjjfCGZD5n5aG80lOKw,7592
 megfile/stdio.py,sha256=ZwxsnJNJYIT7Iyg5pIw4qiyH8bszG6oAhEJuR-hXGG4,658
 megfile/stdio_path.py,sha256=cxaDr8rtisTPnN-rjtaEpqQnshwiqwXFUJBM9xWY7Cg,2711
-megfile/version.py,sha256=
+megfile/version.py,sha256=kYS0NJJrKL79xx4qm3iMYFp2WaSU6ttQqSdCIGu0WVA,19
 megfile/lib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 megfile/lib/base_prefetch_reader.py,sha256=uxVwYknOjc8hLF7q_T2QKMsBqFcrf411ZsuK25CN1eQ,12848
 megfile/lib/combine_reader.py,sha256=Kp2wEloOUpTlIU7dve87MBpSzmIM-F9OtpTawAjFkiU,4828
@@ -43,6 +43,8 @@ megfile/lib/stdio_handler.py,sha256=IDdgENLQlhigEwkLL4zStueVSzdWg7xVcTF_koof_Ek,
 megfile/lib/url.py,sha256=ER32pWy9Q2MAk3TraAaNEBWIqUeBmLuM57ol2cs7-Ks,103
 megfile/utils/__init__.py,sha256=xAzmICA0MtAbg-I2yPfeHjA1N4CiMP4sBrC9BgrfZLw,10151
 megfile/utils/mutex.py,sha256=asb8opGLgK22RiuBJUnfsvB8LnMmodP8KzCVHKmQBWA,2561
+megfile-4.1.2.dist-info/licenses/LICENSE,sha256=WNHhf_5RCaeuKWyq_K39vmp9F28LxKsB4SpomwSZ2L0,11357
+megfile-4.1.2.dist-info/licenses/LICENSE.pyre,sha256=9lf5nT-5ZH25JijpYAequ0bl8E8z5JmZB1qrjiUMp84,1080
 scripts/benchmark/code/iopath_read.py,sha256=O1Qs3mpvD9S_XCuRH2A2FpGWxCOSw6qZvEBrtPeRL1E,661
 scripts/benchmark/code/iopath_write.py,sha256=Mm0efW1J09RJ_CK5i1xtG2hJuaaslikin8qVpuRFP_Q,704
 scripts/benchmark/code/megfile_read.py,sha256=sAMebUiMColHDv3JEkXplImAHvn_IF1-g3BIJxhcQYE,239
@@ -53,10 +55,8 @@ scripts/benchmark/code/s3fs_read.py,sha256=XiTA-qrYblUs-jQWXSnvNg5Wo722C_g47aMMf
 scripts/benchmark/code/s3fs_write.py,sha256=gdXKkWXYGjLJlRT_J64pJN85XvRg3bZexcAJQEMXwtw,402
 scripts/benchmark/code/smart_open_read.py,sha256=SA02jHwS9Y31yFtV9CoJcfND5dR0eA_HsGmGNUrpQls,515
 scripts/benchmark/code/smart_open_write.py,sha256=jDxFJdY97yNH889jz3pawBoei3yaqy8pEMvC_ymHFtM,537
-megfile-4.1.
-megfile-4.1.
-megfile-4.1.
-megfile-4.1.
-megfile-4.1.
-megfile-4.1.0.post3.dist-info/top_level.txt,sha256=fVg49lk5B9L7jyfWUXWxb0DDSuw5pbr0OU62Tvx8J8M,44
-megfile-4.1.0.post3.dist-info/RECORD,,
+megfile-4.1.2.dist-info/METADATA,sha256=7Bxhk9yqd8P_kSNqVhvqCP1yif4ScV2x63YE6vzsznQ,9595
+megfile-4.1.2.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+megfile-4.1.2.dist-info/entry_points.txt,sha256=M6ZWSSv5_5_QtIpZafy3vq7WuOJ_5dSGQQnEZbByt2Q,49
+megfile-4.1.2.dist-info/top_level.txt,sha256=TR6xUw8Px5Ms_UENhEmLNmxOdfHAhTlSt9yTw9LRgsQ,35
+megfile-4.1.2.dist-info/RECORD,,
```
{megfile-4.1.0.post3.dist-info → megfile-4.1.2.dist-info}/entry_points.txt
File without changes

{megfile-4.1.0.post3.dist-info → megfile-4.1.2.dist-info/licenses}/LICENSE
File without changes

{megfile-4.1.0.post3.dist-info → megfile-4.1.2.dist-info/licenses}/LICENSE.pyre
File without changes