megfile 4.0.2__py3-none-any.whl → 4.0.4__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only.
megfile/cli.py CHANGED
@@ -46,6 +46,7 @@ from megfile.version import VERSION
 
 options = {}
 set_log_level()
+max_file_object_catch_count = 1024 * 128
 
 
 @click.group()
@@ -124,12 +125,15 @@ def _sftp_prompt_host_key(path):
     hostname = (
         path.pathlike._urlsplit_parts.hostname  # pytype: disable=attribute-error
     )
-    port = path.pathlike._urlsplit_parts.port  # pytype: disable=attribute-error
-    sftp_add_host_key(
-        hostname=hostname,
-        port=port,
-        prompt=True,
+    port = (
+        path.pathlike._urlsplit_parts.port or 22  # pytype: disable=attribute-error
     )
+    if hostname:
+        sftp_add_host_key(
+            hostname=hostname,
+            port=port,
+            prompt=True,
+        )
 
 
 def _ls(path: str, long: bool, recursive: bool, human_readable: bool):
@@ -161,11 +165,7 @@ def _ls(path: str, long: bool, recursive: bool, human_readable: bool):
         total_count += 1
         output = echo_func(file_stat, base_path, full_path=full_path)
         if file_stat.is_symlink():
-            try:
-                link = smart_readlink(file_stat.path)
-            except FileNotFoundError as e:
-                link = repr(e)
-            output += " -> %s" % link
+            output += " -> %s" % smart_readlink(file_stat.path)
         click.echo(output)
     if long:
         click.echo(f"total({total_count}): {get_human_size(total_size)}")
@@ -417,7 +417,7 @@ def sync(
     file_entries = []
     total_count = total_size = 0
     for total_count, file_entry in enumerate(scan_func(src_path), start=1):
-        if total_count > 1024 * 128:
+        if total_count > max_file_object_catch_count:
             file_entries = []
         else:
             file_entries.append(file_entry)
@@ -506,15 +506,21 @@ def head(path: str, lines: int):
 
     with smart_open(path, "rb") as f:
        for _ in range(lines):
-            try:
-                content = f.readline()
-                if not content:
-                    break
-            except EOFError:
+            content = f.readline()
+            if not content:
                 break
             click.echo(content.strip(b"\n"))
 
 
+def _tail_follow_content(path, offset):
+    with smart_open(path, "rb") as f:
+        f.seek(offset)
+        for line in f.readlines():
+            click.echo(line, nl=False)
+        offset = f.tell()
+    return offset
+
+
 @cli.command(
     short_help="Concatenate any files and send last n lines of them to stdout."
 )
@@ -554,17 +560,14 @@ def tail(path: str, lines: int, follow: bool):
     if line_list:
         click.echo(line_list[-1], nl=False)
 
-    if follow:
+    if follow:  # pragma: no cover
         offset = file_size
         while True:
-            with smart_open(path, "rb") as f:
-                f.seek(offset)
-                line = f.readline()
-                offset = f.tell()
-                if not line:
-                    time.sleep(1)
-                    continue
-                click.echo(line, nl=False)
+            new_offset = _tail_follow_content(path, offset)
+            if new_offset == offset:
+                time.sleep(1)
+            else:
+                offset = new_offset
 
 
 @cli.command(short_help="Write bytes from stdin to file.")
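The `tail --follow` path now polls through `_tail_follow_content`: read everything after the saved offset, echo it, and sleep only when the offset did not advance. Below is a minimal self-contained sketch of the same polling pattern, using the built-in `open` instead of megfile's `smart_open`; the `follow_file` and `read_new_lines` helpers are hypothetical, not part of megfile.

```python
import time


def read_new_lines(path: str, offset: int) -> int:
    """Print every line written after `offset` and return the new offset."""
    with open(path, "rb") as f:
        f.seek(offset)
        for line in f.readlines():
            print(line.decode(errors="replace"), end="")
        return f.tell()


def follow_file(path: str, offset: int = 0, poll_interval: float = 1.0) -> None:
    """Poll the file forever, sleeping whenever no new data has arrived."""
    while True:
        new_offset = read_new_lines(path, offset)
        if new_offset == offset:
            time.sleep(poll_interval)
        else:
            offset = new_offset
```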
megfile/config.py CHANGED
@@ -46,18 +46,13 @@ def parse_quantity(quantity: T.Union[str, int]) -> int:
 
     if suffix.endswith("i"):
         base = 1024
-    elif len(suffix) == 1:
-        base = 1000
     else:
-        raise ValueError("{} has unknown suffix".format(quantity))
+        base = 1000
 
     # handle SI inconsistency
     if suffix == "ki":
         raise ValueError("{} has unknown suffix".format(quantity))
 
-    if suffix[0] not in exponents:
-        raise ValueError("{} has unknown suffix".format(quantity))
-
     exponent = int(exponents[suffix[0]])
     return number * (base**exponent)  # pytype: disable=bad-return-type
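`parse_quantity` now treats every suffix that does not end in `i` as a decimal (base 1000) suffix instead of rejecting multi-character ones, and drops the redundant membership check before indexing `exponents`. A standalone sketch of the suffix handling after this change; the `exponents` table here is an assumption modelled on Kubernetes-style quantities, not copied from megfile:

```python
# Hypothetical exponent table; megfile defines its own mapping elsewhere in config.py.
exponents = {"k": 1, "K": 1, "M": 2, "G": 3, "T": 4, "P": 5, "E": 6}


def scale(number: int, suffix: str) -> int:
    if suffix.endswith("i"):
        base = 1024  # binary suffixes: Ki, Mi, Gi, ...
    else:
        base = 1000  # everything else is decimal; long suffixes no longer raise here

    # handle SI inconsistency: lowercase "ki" is still rejected
    if suffix == "ki":
        raise ValueError("{}{} has unknown suffix".format(number, suffix))

    return number * base ** exponents[suffix[0]]


assert scale(10, "Ki") == 10 * 1024
assert scale(10, "M") == 10 * 1000**2
```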
 
megfile/errors.py CHANGED
@@ -177,17 +177,19 @@ def patch_method(
 
 def _create_missing_ok_generator(generator, missing_ok: bool, error: Exception):
     if missing_ok:
-        yield from generator
-        return
-
-    zero_elem = True
-    for item in generator:
-        zero_elem = False
-        yield item
+        return generator
 
-    if zero_elem:
+    try:
+        first = next(generator)
+    except StopIteration:
         raise error
 
+    def create_generator():
+        yield first
+        yield from generator
+
+    return create_generator()
+
 
 class UnknownError(Exception):
     def __init__(self, error: Exception, path: PathLike, extra: Optional[str] = None):
@@ -411,11 +413,14 @@ def translate_http_error(http_error: Exception, http_url: str) -> Exception:
 
 
 @contextmanager
-def raise_s3_error(s3_url: PathLike):
+def raise_s3_error(s3_url: PathLike, suppress_errors=()):
     try:
         yield
     except Exception as error:
-        raise translate_s3_error(error, s3_url)
+        error = translate_s3_error(error, s3_url)
+        if suppress_errors and isinstance(error, suppress_errors):
+            return
+        raise error
 
 
 def s3_error_code_should_retry(error: str) -> bool:
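Two helper patterns change here: `_create_missing_ok_generator` is no longer itself a generator function, so an empty source can fail at call time, and `raise_s3_error` learns to swallow a configurable set of translated errors. A self-contained sketch of both patterns under assumed names (`missing_ok_generator`, `raise_translated_error` and the generic exceptions are stand-ins, not megfile's API):

```python
from contextlib import contextmanager
from typing import Iterator


def missing_ok_generator(generator: Iterator, missing_ok: bool, error: Exception):
    # With missing_ok, the (possibly empty) iterator is handed back untouched.
    if missing_ok:
        return generator

    # Otherwise peek one element so an empty iterator fails immediately ...
    try:
        first = next(generator)
    except StopIteration:
        raise error

    # ... and chain the peeked element back in front of the remaining items.
    def create_generator():
        yield first
        yield from generator

    return create_generator()


@contextmanager
def raise_translated_error(path: str, suppress_errors=()):
    # Stand-in for raise_s3_error: translate, optionally swallow, otherwise re-raise.
    try:
        yield
    except Exception as error:  # megfile first runs translate_s3_error(error, path)
        if suppress_errors and isinstance(error, suppress_errors):
            return
        raise


print(list(missing_ok_generator(iter([1, 2]), False, FileNotFoundError("empty"))))  # [1, 2]
with raise_translated_error("bucket/key", suppress_errors=(KeyError,)):
    raise KeyError("suppressed instead of propagated")
```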
megfile/fs_path.py CHANGED
@@ -469,6 +469,8 @@ class FSPath(URIPath):
         src_path, dst_path = fspath(self.path_without_protocol), fspath(dst_path)
         if os.path.isfile(src_path):
             _fs_rename_file(src_path, dst_path, overwrite)
+            if os.path.exists(src_path):
+                os.remove(src_path)
             return self.from_path(dst_path)
         else:
             os.makedirs(dst_path, exist_ok=True)
@@ -485,10 +487,7 @@ class FSPath(URIPath):
                 else:
                     _fs_rename_file(src_file_path, dst_file_path, overwrite)
 
-            if os.path.isdir(src_path):
-                shutil.rmtree(src_path)
-            else:
-                os.remove(src_path)
+            shutil.rmtree(src_path, ignore_errors=True)
 
             return self.from_path(dst_path)
 
@@ -518,6 +517,8 @@
     def _scan(
         self, missing_ok: bool = True, followlinks: bool = False
     ) -> Iterator[str]:
+        self._check_int_path()
+
         if self.is_file(followlinks=followlinks):
             path = fspath(self.path_without_protocol)
             yield path
@@ -573,6 +574,7 @@ class FSPath(URIPath):
 
         :returns: An iterator contains all contents have prefix path
         """
+        self._check_int_path()
 
         def create_generator():
             with os.scandir(self.path_without_protocol) as entries:
@@ -740,9 +742,8 @@ class FSPath(URIPath):
                     if not buf:
                         break
                     fdst.write(buf)
-                    if callback is None:
-                        continue
-                    callback(len(buf))
+                    if callback:
+                        callback(len(buf))
         else:
             shutil.copy2(
                 self.path_without_protocol,  # pyre-ignore[6]
@@ -814,15 +815,15 @@ class FSPath(URIPath):
             priority is higher than 'overwrite', default is False
         :param overwrite: whether or not overwrite file when exists, default is True
         """
+        self._check_int_path()
+
         if self.is_dir(followlinks=followlinks):
 
             def ignore_same_file(src: str, names: List[str]) -> List[str]:
                 ignore_files = []
                 for name in names:
                     dst_obj = self.from_path(dst_path).joinpath(name)
-                    if force:
-                        pass
-                    elif not overwrite and dst_obj.exists():
+                    if not overwrite and dst_obj.exists():
                         ignore_files.append(name)
                     elif dst_obj.exists() and is_same_file(
                         self.joinpath(name).stat(), dst_obj.stat(), "copy"
@@ -833,11 +834,11 @@ class FSPath(URIPath):
             shutil.copytree(
                 self.path_without_protocol,  # pyre-ignore[6]
                 dst_path,
-                ignore=ignore_same_file,
+                ignore=None if force else ignore_same_file,
                 dirs_exist_ok=True,
             )
         else:
-            self.copy(dst_path, followlinks=followlinks, overwrite=overwrite)
+            self.copy(dst_path, followlinks=followlinks, overwrite=force or overwrite)
 
     def symlink(self, dst_path: PathLike) -> None:
         """
@@ -893,6 +894,8 @@ class FSPath(URIPath):
         return self.from_path(os.path.expanduser("~"))
 
     def joinpath(self, *other_paths: PathLike) -> "FSPath":
+        self._check_int_path()
+
         path = fspath(self)
         if path == ".":
             path = ""
megfile/hdfs_path.py CHANGED
@@ -372,7 +372,10 @@ class HdfsPath(URIPath):
         dst_path = self.from_path(dst_path)
         if self.is_dir():
             for filename in self.iterdir():
-                self.joinpath(filename).rename(dst_path.joinpath(filename))
+                filename.rename(
+                    dst_path.joinpath(filename.relative_to(self.path_with_protocol)),
+                    overwrite=overwrite,
+                )
         else:
             if overwrite:
                 dst_path.remove(missing_ok=True)
megfile/http_path.py CHANGED
@@ -96,14 +96,7 @@ def get_http_session(
                 file_info.seek(0)
             elif isinstance(file_info, (tuple, list)) and len(file_info) >= 2:
                 file_info = list(file_info)
-                if (
-                    isinstance(file_info[1], (tuple, list))
-                    and len(file_info[1]) >= 2
-                ):
-                    file_info[1] = list(file_info[1])
-                    file_info[1] = seek_or_reopen(file_info[1])
-                else:
-                    file_info[1] = seek_or_reopen(file_info[1])
+                file_info[1] = seek_or_reopen(file_info[1])
                 files[key] = file_info
 
     session.request = patch_method(
megfile/lib/s3_prefetch_reader.py CHANGED
@@ -64,7 +64,7 @@ class S3PrefetchReader(BasePrefetchReader):
     def _get_content_size(self):
         if self._block_capacity <= 0:
             response = self._client.head_object(Bucket=self._bucket, Key=self._key)
-            self._content_etag = response["ETag"]
+            self._content_etag = response.get("ETag")
             return int(response["ContentLength"])
 
         try:
@@ -84,7 +84,7 @@ class S3PrefetchReader(BasePrefetchReader):
         first_future = Future()
         first_future.set_result(first_index_response["Body"])
         self._insert_futures(index=0, future=first_future)
-        self._content_etag = first_index_response["ETag"]
+        self._content_etag = first_index_response.get("ETag")
         return content_size
 
     @property
@@ -120,7 +120,7 @@ class S3PrefetchReader(BasePrefetchReader):
         start, end = index * self._block_size, (index + 1) * self._block_size - 1
         response = self._fetch_response(start=start, end=end)
         etag = response.get("ETag", None)
-        if etag is not None and etag != self._content_etag:
+        if self._content_etag and etag and etag != self._content_etag:
             raise S3FileChangedError(
                 "File changed: %r, etag before: %s, after: %s"
                 % (self.name, self._content_etag, etag)
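The prefetch reader now reads ETags with `.get()` and only raises `S3FileChangedError` when both the remembered and the freshly returned ETag are present, which tolerates S3-compatible backends that omit the header. A standalone sketch of that comparison (the exception class below is a stand-in for megfile's):

```python
from typing import Optional


class S3FileChangedError(Exception):
    pass


def check_etag(name: str, remembered: Optional[str], current: Optional[str]) -> None:
    # Skip the check whenever either side is missing, e.g. backends without ETag support.
    if remembered and current and current != remembered:
        raise S3FileChangedError(
            "File changed: %r, etag before: %s, after: %s" % (name, remembered, current)
        )


check_etag("s3://bucket/key", None, '"abc"')     # no error: nothing remembered yet
check_etag("s3://bucket/key", '"abc"', '"abc"')  # no error: unchanged
# check_etag("s3://bucket/key", '"abc"', '"def"')  # would raise S3FileChangedError
```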
megfile/s3.py CHANGED
@@ -161,7 +161,9 @@ def s3_isfile(path: PathLike, followlinks: bool = False) -> bool:
     return S3Path(path).is_file(followlinks)
 
 
-def s3_listdir(path: PathLike, followlinks: bool = False) -> List[str]:
+def s3_listdir(
+    path: PathLike, followlinks: bool = False, missing_ok: bool = True
+) -> List[str]:
     """
     Get all contents of given s3_url. The result is in ascending alphabetical order.
 
@@ -169,7 +171,7 @@ def s3_listdir(path: PathLike, followlinks: bool = False) -> List[str]:
     :returns: All contents have prefix of s3_url in ascending alphabetical order
     :raises: S3FileNotFoundError, S3NotADirectoryError
     """
-    return S3Path(path).listdir(followlinks)
+    return S3Path(path).listdir(followlinks, missing_ok)
 
 
 def s3_load_from(path: PathLike, followlinks: bool = False) -> BinaryIO:
@@ -260,7 +262,9 @@ def s3_scan_stat(
     return S3Path(path).scan_stat(missing_ok, followlinks)
 
 
-def s3_scandir(path: PathLike, followlinks: bool = False) -> Iterator[FileEntry]:
+def s3_scandir(
+    path: PathLike, followlinks: bool = False, missing_ok: bool = True
+) -> Iterator[FileEntry]:
     """
     Get all contents of given s3_url, the order of result is not guaranteed.
 
@@ -268,7 +272,7 @@ def s3_scandir(path: PathLike, followlinks: bool = False) -> Iterator[FileEntry]
     :returns: All contents have prefix of s3_url
     :raises: S3FileNotFoundError, S3NotADirectoryError
     """
-    return S3Path(path).scandir(followlinks)
+    return S3Path(path).scandir(followlinks, missing_ok)
 
 
 def s3_stat(path: PathLike, follow_symlinks=True) -> StatResult:
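`s3_listdir` and `s3_scandir` now accept and forward a `missing_ok` flag. A hedged usage sketch based on this diff; the bucket and prefix names are placeholders, and the described behaviour is a reading of the changes above rather than documented API:

```python
from megfile.s3 import s3_listdir, s3_scandir

# Reading of the new flag, as suggested by this diff:
# - missing_ok=True (the default here): an empty or missing prefix yields an
#   empty result, and a missing bucket (S3BucketNotFoundError) is suppressed.
# - missing_ok=False: an empty prefix raises S3FileNotFoundError, as before.

print(s3_listdir("s3://example-bucket/maybe-missing-prefix/", missing_ok=True))  # possibly []

for entry in s3_scandir("s3://example-bucket/maybe-missing-prefix/", missing_ok=False):
    print(entry.name)
```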
megfile/s3_path.py CHANGED
@@ -1702,24 +1702,26 @@ class S3Path(URIPath):
                 return False
         return True
 
-    def listdir(self, followlinks: bool = False) -> List[str]:
+    def listdir(self, followlinks: bool = False, missing_ok: bool = True) -> List[str]:
         """
         Get all contents of given s3_url. The result is in ascending alphabetical order.
 
         :returns: All contents have prefix of s3_url in ascending alphabetical order
         :raises: S3FileNotFoundError, S3NotADirectoryError
         """
-        entries = list(self.scandir(followlinks=followlinks))
+        entries = list(self.scandir(followlinks=followlinks, missing_ok=missing_ok))
         return sorted([entry.name for entry in entries])
 
-    def iterdir(self, followlinks: bool = False) -> Iterator["S3Path"]:
+    def iterdir(
+        self, followlinks: bool = False, missing_ok: bool = True
+    ) -> Iterator["S3Path"]:
         """
         Get all contents of given s3_url. The result is in ascending alphabetical order.
 
         :returns: All contents have prefix of s3_url in ascending alphabetical order
         :raises: S3FileNotFoundError, S3NotADirectoryError
         """
-        for path in self.listdir(followlinks=followlinks):
+        for path in self.listdir(followlinks=followlinks, missing_ok=missing_ok):
             yield self.joinpath(path)
 
     def load(self, followlinks: bool = False) -> BinaryIO:
@@ -1959,17 +1961,8 @@ class S3Path(URIPath):
             raise UnsupportedError("Scan whole s3", self.path_with_protocol)
 
         def create_generator() -> Iterator[FileEntry]:
-            if not self.is_dir():
-                if self.is_file():
-                    # On s3, file and directory may be of same name and level, so need
-                    # to test the path is file or directory
-                    yield FileEntry(
-                        self.name,
-                        fspath(self.path_with_protocol),
-                        self.stat(follow_symlinks=followlinks),
-                    )
-                return
-
+            # On s3, file and directory may be of same name and level, so need
+            # to test the path is file or directory
             if not key.endswith("/") and self.is_file():
                 yield FileEntry(
                     self.name,
@@ -1979,7 +1972,8 @@ class S3Path(URIPath):
 
             prefix = _become_prefix(key)
             client = self._client
-            with raise_s3_error(self.path_with_protocol):
+            suppress_errors = S3BucketNotFoundError if missing_ok else ()
+            with raise_s3_error(self.path_with_protocol, suppress_errors):
                 for resp in _list_objects_recursive(client, bucket, prefix):
                     for content in resp.get("Contents", []):
                         full_path = s3_path_join(
@@ -2008,7 +2002,9 @@ class S3Path(URIPath):
             S3FileNotFoundError("No match any file in: %r" % self.path_with_protocol),
         )
 
-    def scandir(self, followlinks: bool = False) -> Iterator[FileEntry]:
+    def scandir(
+        self, followlinks: bool = False, missing_ok: bool = False
+    ) -> Iterator[FileEntry]:
         """
         Get all contents of given s3_url, the order of result is not guaranteed.
 
@@ -2023,15 +2019,14 @@ class S3Path(URIPath):
 
         if self.is_file():
             raise S3NotADirectoryError("Not a directory: %r" % self.path_with_protocol)
-        elif not self.is_dir():
-            raise S3FileNotFoundError("No such directory: %r" % self.path_with_protocol)
-        prefix = _become_prefix(key)
-        client = self._client
 
         # In order to do check on creation,
         # we need to wrap the iterator in another function
        def create_generator() -> Iterator[FileEntry]:
-            with raise_s3_error(self.path_with_protocol):
+            suppress_errors = S3BucketNotFoundError if missing_ok else ()
+            with raise_s3_error(self.path_with_protocol, suppress_errors):
+                prefix = _become_prefix(key)
+                client = self._client
 
                 def generate_s3_path(protocol: str, bucket: str, key: str) -> str:
                     return "%s://%s/%s" % (protocol, bucket, key)
@@ -2082,7 +2077,13 @@ class S3Path(URIPath):
                         content["Key"][len(prefix) :], src_url, _make_stat(content)
                     )
 
-        return ContextIterator(create_generator())
+        return ContextIterator(
+            _create_missing_ok_generator(
+                create_generator(),
+                missing_ok,
+                S3FileNotFoundError("No such directory: %r" % self.path_with_protocol),
+            )
+        )
 
     def _get_dir_stat(self) -> StatResult:
         """
@@ -2362,7 +2363,11 @@ class S3Path(URIPath):
             ):
                 continue
 
-            src_file_path.copy(dst_file_path, followlinks=followlinks)
+            src_file_path.copy(
+                dst_file_path,
+                followlinks=followlinks,
+                overwrite=True,
+            )
 
     def symlink(self, dst_path: PathLike) -> None:
         """
megfile/sftp_path.py CHANGED
@@ -1173,7 +1173,11 @@ class SftpPath(URIPath):
             ):
                 continue
 
-            self.from_path(src_file_path).copy(dst_file_path, followlinks=followlinks)
+            src_path.copy(
+                dst_file_path,
+                followlinks=followlinks,
+                overwrite=True,
+            )
 
     def utime(self, atime: Union[float, int], mtime: Union[float, int]) -> None:
         """
megfile/utils/__init__.py CHANGED
@@ -280,24 +280,8 @@ class classproperty(property):
         # apply the __get__ on the class
         return super(classproperty, self).__get__(cls)
 
-    def __set__(self, cls_or_obj, value: object) -> None:
-        """
-        This method gets called when a property value should be set.
-        @param cls_or_obj: The class or instance of which the property should be
-            changed.
-        @param value: The new value.
-        """
-        # call this method only on the class, not the instance
-        super(classproperty, self).__set__(_get_class(cls_or_obj), value)
-
-    def __delete__(self, cls_or_obj) -> None:
-        """
-        This method gets called when a property should be deleted.
-        @param cls_or_obj: The class or instance of which the property should be
-            deleted.
-        """
-        # call this method only on the class, not the instance
-        super(classproperty, self).__delete__(_get_class(cls_or_obj))
+    # __set__ and __delete__ only work for instance
+    # classproperty only support __get__
 
 
 class cached_classproperty(cached_property):
@@ -333,7 +317,7 @@ class cached_classproperty(cached_property):
         @return: The value of the property.
         """
         if self.attrname is None:
-            raise TypeError(
+            raise TypeError(  # pragma: no cover
                 "Cannot use cached_classproperty instance without calling "
                 "__set_name__ on it."
            )
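`classproperty.__set__` and `__delete__` are removed because, as the remaining comment notes, those descriptor hooks only fire for instance attribute access; assignment on the class itself bypasses them, so the overrides were effectively dead code. A minimal sketch of a read-only class-level property, not megfile's implementation (which subclasses `property`):

```python
class classproperty:
    """Read-only class property: only __get__ is implemented."""

    def __init__(self, fget):
        self.fget = fget

    def __get__(self, obj, owner=None):
        # Resolve against the class whether accessed on the class or an instance.
        cls = owner if owner is not None else type(obj)
        return self.fget(cls)


class Config:
    _root = "/tmp/megfile"

    @classproperty
    def root(cls):
        return cls._root


print(Config.root)    # "/tmp/megfile", works on the class
print(Config().root)  # and on instances

# Assigning on the class simply replaces the descriptor; a __set__ override
# could not intercept that, which is why the setter was dropped.
Config.root = "/data"
print(Config.root)    # "/data"
```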
megfile/version.py CHANGED
@@ -1 +1 @@
-VERSION = "4.0.2"
+VERSION = "4.0.4"
megfile-4.0.2.dist-info/METADATA → megfile-4.0.4.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: megfile
-Version: 4.0.2
+Version: 4.0.4
 Summary: Megvii file operation library
 Author-email: megvii <megfile@megvii.com>
 Project-URL: Homepage, https://github.com/megvii-research/megfile
@@ -212,9 +212,9 @@ You can get the configuration from `~/.config/megfile/aliases.conf`, like:
 protocol = s3+tos
 ```
 
-## Speed Test
-[![10GiB](https://github.com/megvii-research/megfile/blob/main/scripts/speed_test/10GiB.png?raw=true)](https://megvii-research.github.io/megfile/speed_test.html)
-[![10MiB](https://github.com/megvii-research/megfile/blob/main/scripts/speed_test/10MiB.png?raw=true)](https://megvii-research.github.io/megfile/speed_test.html)
+## Benchmark
+[![10GiB](https://github.com/megvii-research/megfile/blob/main/scripts/benchmark/10GiB.png?raw=true)](https://megvii-research.github.io/megfile/benchmark.html)
+[![10MiB](https://github.com/megvii-research/megfile/blob/main/scripts/benchmark/10MiB.png?raw=true)](https://megvii-research.github.io/megfile/benchmark.html)
 
 ## How to Contribute
 * We welcome everyone to contribute code to the `megfile` project, but the contributed code needs to meet the following conditions as much as possible:
megfile-4.0.2.dist-info/RECORD → megfile-4.0.4.dist-info/RECORD CHANGED
@@ -1,24 +1,24 @@
 megfile/__init__.py,sha256=7oEfu410CFKzDWZ9RjL5xEJ1gtkJkTfvPrL_7TWdJuY,7366
-megfile/cli.py,sha256=e3VVr8oe8iR7L_PtpNtyqAvQL_WgJzzEz8oewSAlgX4,24887
-megfile/config.py,sha256=_6HiGeXEyk6RjPdjA0eEj1unq9iLJV_vQJBzQ-eHNvs,4185
-megfile/errors.py,sha256=a55qKQgyfiLmV-qnojUFzq2gu9JXpj3ZiC2qVaWyUTA,14160
+megfile/cli.py,sha256=Y7ZyWsYmzMUjUrJ1Vz-WMWJQcpK-4og_5xyRzfKI91U,24957
+megfile/config.py,sha256=2MMj5QkhlDJQFZRbCQL2c9iDdeMAVctiaPszRBkg5vM,3988
+megfile/errors.py,sha256=KrzJFBgX7awoSJBADswCo4u6qbj-rcVQMN5It209Dvk,14333
 megfile/fs.py,sha256=bPGbapv41FzME87X3MhSNQRjNmHrI23FuFnjPT0ukQs,18239
-megfile/fs_path.py,sha256=ZK-po1xqhHocMb9Vrxf5K9tDx3zxQmGxNIHY3Z7Akp8,39085
+megfile/fs_path.py,sha256=K485rResPqL56m9oIkbBBCTrUkoKBWIP5EjhjbpTkIY,39141
 megfile/hdfs.py,sha256=JfqTTvMzTeDFAJ-uq5BU0sjFaE3qshlVZ0_5h9oImfA,13361
-megfile/hdfs_path.py,sha256=_ir7En78K0nZudVOL7yYcKZgBQRZwlMDvWTRmYXEfOE,25637
+megfile/hdfs_path.py,sha256=DbFuROHqlLo9o94N0HF1X83DMr7wKhZ9SR2mnKNVjyc,25739
 megfile/http.py,sha256=1nuGe-JbnwMFyV3s35CJxByED3uoRoS9y8Y8cSGP9Kw,3865
-megfile/http_path.py,sha256=c-xAu5wDxcTevmIUmrNEy-m-QiCfDJToaVI7y8SVIUI,14492
+megfile/http_path.py,sha256=yRIk-fNbrsY8rUS5KVOfocS_PS520dX5KOs8lImpLaY,14173
 megfile/interfaces.py,sha256=p4UvVZpeLx5djd6bqqDaygIx_s-_AxIVj-gudTch4JE,8467
 megfile/pathlike.py,sha256=vfuTBqSTIciRxkkqMfLfnBxWTEl9yns1yR8zgK4Raw0,31268
-megfile/s3.py,sha256=zqAegH5tijcztEKcfHXmOYhAR880nTxaAzc2O0JJnjc,16661
-megfile/s3_path.py,sha256=lpUKy4n5DTf6hK6TvPhMjt_ZgdIXO4vcyK_VLaGkvhg,93395
+megfile/s3.py,sha256=aVse1PsYchTkkMPUUW7GWfKOApvkl_Qh5pwNYdk7Ehc,16747
+megfile/s3_path.py,sha256=hhp0zkLfxe1MnKGlLnvPLi7mSYQirt81yGU8ToWl6r8,93576
 megfile/sftp.py,sha256=0ZnQlmhgvs7pYjFTcvvOyxTo2IUurE-hp1GN0hnIrdQ,26473
-megfile/sftp_path.py,sha256=4rcbn3wqcOEs71W6qWu1efcj6MZUgrZm6U0Jan-eB70,43604
+megfile/sftp_path.py,sha256=LttyxwoDMb2nhY4b9qUAHTichrG6Rec8Q_xUTqC6VWI,43662
 megfile/smart.py,sha256=Ps8acPx6jeG1UJnRD8xL2aQjRp7IMW8sV6VFkMF0TQk,36910
 megfile/smart_path.py,sha256=Bqg95T2-XZrRXWhH7GT-jMCYzD7i1SIXdczQxtOxiPs,7583
 megfile/stdio.py,sha256=C_cGID_npthpwoPcsJMMEqqbVUPUnDxxJV9jLY2_D7c,635
 megfile/stdio_path.py,sha256=L8ODNIwO79UIv13YYc2OTr6f4XTv4ZPyvBeRk83-AjA,2700
-megfile/version.py,sha256=secXeY4K_CdlbaUs9G--LGWfWlf1VqdtopDxqRzq2JQ,19
+megfile/version.py,sha256=hJ-Qyt0gNnGNOAUMy6dMT2dsePu73CM3Gc3e0kxG4hQ,19
 megfile/lib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 megfile/lib/base_prefetch_reader.py,sha256=6Dy2ZwlowqAvyUUa7bpQLCKOclmmUDhqEF-_CDDp0Og,13100
 megfile/lib/combine_reader.py,sha256=nKGAug29lOpNIZuLKu7_qVrJJRpXL_J4jxLglWbGJ1w,4808
@@ -36,27 +36,27 @@ megfile/lib/s3_cached_handler.py,sha256=X8PdeRC-BY6eSmOO5f2BeyjTPxyEwNtHgmAm9Vgm
 megfile/lib/s3_limited_seekable_writer.py,sha256=mUeoTS98LHluwDN7zxdCVcsjOGBT1bOYV8nRvi9QMGE,6212
 megfile/lib/s3_memory_handler.py,sha256=4uzBzz2jfRI_u6jl0CpOGAhpNJhDQo18FSAweauCUFs,4136
 megfile/lib/s3_pipe_handler.py,sha256=dm7NnZd1Ym5ABS1GvOQtoCJEO_CB8e6p4sUhLiid0go,3622
-megfile/lib/s3_prefetch_reader.py,sha256=dHltiM5Ui-SY4pqhvIsmC0iNmprXwlczDD4lNHB5WrQ,4418
+megfile/lib/s3_prefetch_reader.py,sha256=R37-y_L9l8IKJhpT8HwBrZEbo2X72vCqEV6fvqPCBug,4437
 megfile/lib/s3_share_cache_reader.py,sha256=LVWKxHdHo0_zUIW4o8yqNvplqqwezUPeYEt02Vj-WNM,3754
 megfile/lib/shadow_handler.py,sha256=TntewlvIW9ZxCfmqASDQREHoiZ8v42faOe9sovQYQz0,2779
 megfile/lib/stdio_handler.py,sha256=IDdgENLQlhigEwkLL4zStueVSzdWg7xVcTF_koof_Ek,1987
 megfile/lib/url.py,sha256=ER32pWy9Q2MAk3TraAaNEBWIqUeBmLuM57ol2cs7-Ks,103
-megfile/utils/__init__.py,sha256=sATf_NlsSTYIMEiA8-gM6K1M-Q1K6_7rx2VM31hrqaA,10838
+megfile/utils/__init__.py,sha256=xAzmICA0MtAbg-I2yPfeHjA1N4CiMP4sBrC9BgrfZLw,10151
 megfile/utils/mutex.py,sha256=asb8opGLgK22RiuBJUnfsvB8LnMmodP8KzCVHKmQBWA,2561
-scripts/speed_test/code/iopath_read.py,sha256=O1Qs3mpvD9S_XCuRH2A2FpGWxCOSw6qZvEBrtPeRL1E,661
-scripts/speed_test/code/iopath_write.py,sha256=Mm0efW1J09RJ_CK5i1xtG2hJuaaslikin8qVpuRFP_Q,704
-scripts/speed_test/code/megfile_read.py,sha256=sAMebUiMColHDv3JEkXplImAHvn_IF1-g3BIJxhcQYE,239
-scripts/speed_test/code/megfile_write.py,sha256=bzn-i2bGH4eRcsVvkhXK35KsQkX2v2oEsOJ0Ft5saj0,257
-scripts/speed_test/code/pyarrow_read.py,sha256=2QBGKjGV2Dvl2ukOntLSag33pF55l3tfZ2Z6dLUjovw,305
-scripts/speed_test/code/pyarrow_write.py,sha256=U1puLh-ljSXv772bZYAyhzmxhPOq4aR4j-QtwdM9hG0,328
-scripts/speed_test/code/s3fs_read.py,sha256=XiTA-qrYblUs-jQWXSnvNg5Wo722C_g47aMMfo5XJBY,380
-scripts/speed_test/code/s3fs_write.py,sha256=gdXKkWXYGjLJlRT_J64pJN85XvRg3bZexcAJQEMXwtw,402
-scripts/speed_test/code/smart_open_read.py,sha256=SA02jHwS9Y31yFtV9CoJcfND5dR0eA_HsGmGNUrpQls,515
-scripts/speed_test/code/smart_open_write.py,sha256=jDxFJdY97yNH889jz3pawBoei3yaqy8pEMvC_ymHFtM,537
-megfile-4.0.2.dist-info/LICENSE,sha256=WNHhf_5RCaeuKWyq_K39vmp9F28LxKsB4SpomwSZ2L0,11357
-megfile-4.0.2.dist-info/LICENSE.pyre,sha256=9lf5nT-5ZH25JijpYAequ0bl8E8z5JmZB1qrjiUMp84,1080
-megfile-4.0.2.dist-info/METADATA,sha256=A8TXdy5RWnQFzA7e2r9h757InExc7l7JaSN3O3felgk,9578
-megfile-4.0.2.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
-megfile-4.0.2.dist-info/entry_points.txt,sha256=M6ZWSSv5_5_QtIpZafy3vq7WuOJ_5dSGQQnEZbByt2Q,49
-megfile-4.0.2.dist-info/top_level.txt,sha256=fVg49lk5B9L7jyfWUXWxb0DDSuw5pbr0OU62Tvx8J8M,44
-megfile-4.0.2.dist-info/RECORD,,
+scripts/benchmark/code/iopath_read.py,sha256=O1Qs3mpvD9S_XCuRH2A2FpGWxCOSw6qZvEBrtPeRL1E,661
+scripts/benchmark/code/iopath_write.py,sha256=Mm0efW1J09RJ_CK5i1xtG2hJuaaslikin8qVpuRFP_Q,704
+scripts/benchmark/code/megfile_read.py,sha256=sAMebUiMColHDv3JEkXplImAHvn_IF1-g3BIJxhcQYE,239
+scripts/benchmark/code/megfile_write.py,sha256=bzn-i2bGH4eRcsVvkhXK35KsQkX2v2oEsOJ0Ft5saj0,257
+scripts/benchmark/code/pyarrow_read.py,sha256=2QBGKjGV2Dvl2ukOntLSag33pF55l3tfZ2Z6dLUjovw,305
+scripts/benchmark/code/pyarrow_write.py,sha256=U1puLh-ljSXv772bZYAyhzmxhPOq4aR4j-QtwdM9hG0,328
+scripts/benchmark/code/s3fs_read.py,sha256=XiTA-qrYblUs-jQWXSnvNg5Wo722C_g47aMMfo5XJBY,380
+scripts/benchmark/code/s3fs_write.py,sha256=gdXKkWXYGjLJlRT_J64pJN85XvRg3bZexcAJQEMXwtw,402
+scripts/benchmark/code/smart_open_read.py,sha256=SA02jHwS9Y31yFtV9CoJcfND5dR0eA_HsGmGNUrpQls,515
+scripts/benchmark/code/smart_open_write.py,sha256=jDxFJdY97yNH889jz3pawBoei3yaqy8pEMvC_ymHFtM,537
+megfile-4.0.4.dist-info/LICENSE,sha256=WNHhf_5RCaeuKWyq_K39vmp9F28LxKsB4SpomwSZ2L0,11357
+megfile-4.0.4.dist-info/LICENSE.pyre,sha256=9lf5nT-5ZH25JijpYAequ0bl8E8z5JmZB1qrjiUMp84,1080
+megfile-4.0.4.dist-info/METADATA,sha256=ZOXOJNvkRh_LaCknY7VU_qS2KM6LNJf3MZVN6Pygsg0,9573
+megfile-4.0.4.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
+megfile-4.0.4.dist-info/entry_points.txt,sha256=M6ZWSSv5_5_QtIpZafy3vq7WuOJ_5dSGQQnEZbByt2Q,49
+megfile-4.0.4.dist-info/top_level.txt,sha256=fVg49lk5B9L7jyfWUXWxb0DDSuw5pbr0OU62Tvx8J8M,44
+megfile-4.0.4.dist-info/RECORD,,
8 files without changes