megfile-3.1.6.post1-py3-none-any.whl → megfile-4.0.0.post1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. megfile/cli.py +12 -7
  2. megfile/config.py +27 -39
  3. megfile/fs.py +169 -12
  4. megfile/fs_path.py +183 -260
  5. megfile/hdfs.py +106 -5
  6. megfile/hdfs_path.py +34 -90
  7. megfile/http.py +50 -1
  8. megfile/http_path.py +27 -65
  9. megfile/interfaces.py +1 -8
  10. megfile/lib/base_prefetch_reader.py +62 -78
  11. megfile/lib/combine_reader.py +5 -0
  12. megfile/lib/glob.py +3 -6
  13. megfile/lib/hdfs_prefetch_reader.py +7 -7
  14. megfile/lib/http_prefetch_reader.py +6 -6
  15. megfile/lib/s3_buffered_writer.py +71 -65
  16. megfile/lib/s3_cached_handler.py +1 -2
  17. megfile/lib/s3_limited_seekable_writer.py +3 -7
  18. megfile/lib/s3_memory_handler.py +1 -2
  19. megfile/lib/s3_pipe_handler.py +1 -2
  20. megfile/lib/s3_prefetch_reader.py +10 -19
  21. megfile/lib/s3_share_cache_reader.py +8 -5
  22. megfile/pathlike.py +397 -401
  23. megfile/s3.py +118 -17
  24. megfile/s3_path.py +126 -209
  25. megfile/sftp.py +300 -10
  26. megfile/sftp_path.py +46 -322
  27. megfile/smart.py +33 -27
  28. megfile/smart_path.py +9 -14
  29. megfile/stdio.py +1 -1
  30. megfile/stdio_path.py +2 -2
  31. megfile/utils/__init__.py +3 -4
  32. megfile/version.py +1 -1
  33. {megfile-3.1.6.post1.dist-info → megfile-4.0.0.post1.dist-info}/METADATA +7 -7
  34. megfile-4.0.0.post1.dist-info/RECORD +52 -0
  35. {megfile-3.1.6.post1.dist-info → megfile-4.0.0.post1.dist-info}/WHEEL +1 -1
  36. {megfile-3.1.6.post1.dist-info → megfile-4.0.0.post1.dist-info}/top_level.txt +0 -2
  37. docs/conf.py +0 -65
  38. megfile-3.1.6.post1.dist-info/RECORD +0 -55
  39. scripts/convert_results_to_sarif.py +0 -91
  40. scripts/generate_file.py +0 -344
  41. {megfile-3.1.6.post1.dist-info → megfile-4.0.0.post1.dist-info}/LICENSE +0 -0
  42. {megfile-3.1.6.post1.dist-info → megfile-4.0.0.post1.dist-info}/LICENSE.pyre +0 -0
  43. {megfile-3.1.6.post1.dist-info → megfile-4.0.0.post1.dist-info}/entry_points.txt +0 -0
megfile/s3.py CHANGED
@@ -2,12 +2,7 @@ from typing import BinaryIO, Callable, Iterator, List, Optional, Tuple
 
 from megfile.interfaces import Access, FileEntry, PathLike, StatResult
 from megfile.s3_path import (
-    S3BufferedWriter,
-    S3Cacher,
-    S3LimitedSeekableWriter,
     S3Path,
-    S3PrefetchReader,
-    S3ShareCacheReader,
     get_endpoint_url,
     get_s3_client,
     get_s3_session,
@@ -17,19 +12,12 @@ from megfile.s3_path import (
     s3_cached_open,
     s3_concat,
     s3_download,
-    s3_glob,
-    s3_glob_stat,
-    s3_iglob,
     s3_load_content,
-    s3_lstat,
-    s3_makedirs,
     s3_memory_open,
     s3_open,
     s3_path_join,
     s3_pipe_open,
     s3_prefetch_open,
-    s3_readlink,
-    s3_rename,
     s3_share_cache_open,
     s3_upload,
 )
@@ -48,11 +36,6 @@ __all__ = [
     "s3_prefetch_open",
     "s3_share_cache_open",
     "s3_open",
-    "S3Cacher",
-    "S3BufferedWriter",
-    "S3LimitedSeekableWriter",
-    "S3PrefetchReader",
-    "S3ShareCacheReader",
     "s3_upload",
     "s3_download",
     "s3_load_content",
@@ -443,3 +426,121 @@ def s3_save_as(file_object: BinaryIO, path: PathLike):
     :param file_object: Stream to be read
     """
     return S3Path(path).save(file_object)
+
+
+def s3_readlink(path) -> str:
+    """
+    Return a string representing the path to which the symbolic link points.
+
+    :returns: Return a string representing the path to which the symbolic link points.
+    :raises: S3NameTooLongError, S3BucketNotFoundError, S3IsADirectoryError,
+        S3NotALinkError
+    """
+    return S3Path(path).readlink().path_with_protocol
+
+
+def s3_rename(src_url: PathLike, dst_url: PathLike, overwrite: bool = True) -> None:
+    """
+    Move s3 file path from src_url to dst_url
+
+    :param dst_url: Given destination path
+    :param overwrite: whether or not overwrite file when exists
+    """
+    S3Path(src_url).rename(dst_url, overwrite)
+
+
+def s3_glob(
+    path: PathLike,
+    recursive: bool = True,
+    missing_ok: bool = True,
+    followlinks: bool = False,
+) -> List[str]:
+    """Return s3 path list in ascending alphabetical order,
+    in which path matches glob pattern
+
+    Notes: Only glob in bucket. If trying to match bucket with wildcard characters,
+    raise UnsupportedError
+
+    :param recursive: If False, `**` will not search directory recursively
+    :param missing_ok: If False and target path doesn't match any file,
+        raise FileNotFoundError
+    :raises: UnsupportedError, when bucket part contains wildcard characters
+    :returns: A list contains paths match `s3_pathname`
+    """
+    return list(
+        s3_iglob(
+            path=path,
+            recursive=recursive,
+            missing_ok=missing_ok,
+            followlinks=followlinks,
+        )
+    )
+
+
+def s3_glob_stat(
+    path: PathLike,
+    recursive: bool = True,
+    missing_ok: bool = True,
+    followlinks: bool = False,
+) -> Iterator[FileEntry]:
+    """Return a generator contains tuples of path and file stat,
+    in ascending alphabetical order, in which path matches glob pattern
+
+    Notes: Only glob in bucket. If trying to match bucket with wildcard characters,
+    raise UnsupportedError
+
+    :param recursive: If False, `**` will not search directory recursively
+    :param missing_ok: If False and target path doesn't match any file,
+        raise FileNotFoundError
+    :raises: UnsupportedError, when bucket part contains wildcard characters
+    :returns: A generator contains tuples of path and file stat,
+        in which paths match `s3_pathname`
+    """
+    return S3Path(path).glob_stat(
+        pattern="", recursive=recursive, missing_ok=missing_ok, followlinks=followlinks
+    )
+
+
+def s3_iglob(
+    path: PathLike,
+    recursive: bool = True,
+    missing_ok: bool = True,
+    followlinks: bool = False,
+) -> Iterator[str]:
+    """Return s3 path iterator in ascending alphabetical order,
+    in which path matches glob pattern
+
+    Notes: Only glob in bucket. If trying to match bucket with wildcard characters,
+    raise UnsupportedError
+
+    :param recursive: If False, `**` will not search directory recursively
+    :param missing_ok: If False and target path doesn't match any file,
+        raise FileNotFoundError
+    :raises: UnsupportedError, when bucket part contains wildcard characters
+    :returns: An iterator contains paths match `s3_pathname`
+    """
+    for path_obj in S3Path(path).iglob(
+        pattern="", recursive=recursive, missing_ok=missing_ok, followlinks=followlinks
+    ):
+        yield path_obj.path_with_protocol
+
+
+def s3_makedirs(path: PathLike, exist_ok: bool = False):
+    """
+    Create an s3 directory.
+    Purely creating directory is invalid because it's unavailable on OSS.
+    This function is to test the target bucket have WRITE access.
+
+    :param path: Given path
+    :param exist_ok: If False and target directory exists, raise S3FileExistsError
+    :raises: S3BucketNotFoundError, S3FileExistsError
+    """
+    return S3Path(path).mkdir(parents=True, exist_ok=exist_ok)
+
+
+def s3_lstat(path: PathLike) -> StatResult:
+    """
+    Like Path.stat() but, if the path points to a symbolic link,
+    return the symbolic link’s information rather than its target’s.
+    """
+    return S3Path(path).lstat()
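
For reference, a minimal usage sketch of the module-level wrappers added above (s3_glob, s3_iglob, s3_glob_stat, s3_rename, s3_makedirs, s3_lstat). The bucket and key names are hypothetical, the sketch assumes S3 credentials are already configured for megfile (e.g. via environment variables or ~/.aws/credentials), and attribute access follows megfile's documented FileEntry/StatResult fields:

    from megfile.s3 import (
        s3_glob,
        s3_glob_stat,
        s3_iglob,
        s3_lstat,
        s3_makedirs,
        s3_rename,
    )

    # "Create" a prefix; per the docstring this mainly verifies WRITE access
    # to the (hypothetical) bucket.
    s3_makedirs("s3://my-bucket/logs/", exist_ok=True)

    # Eager listing vs. lazy iteration over keys matching a glob pattern.
    paths = s3_glob("s3://my-bucket/logs/**/*.json", recursive=True)
    for path in s3_iglob("s3://my-bucket/logs/**/*.json", missing_ok=True):
        print(path)

    # s3_glob_stat yields FileEntry objects carrying both the path and its stat.
    for entry in s3_glob_stat("s3://my-bucket/logs/**/*.json"):
        print(entry.path, entry.stat.size)

    # Move a key (overwrites the destination by default), then inspect it
    # without following symbolic links.
    s3_rename("s3://my-bucket/logs/a.json", "s3://my-bucket/archive/a.json")
    print(s3_lstat("s3://my-bucket/archive/a.json").mtime)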