fsspec 2024.6.1__py3-none-any.whl → 2024.10.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fsspec/_version.py +2 -2
- fsspec/asyn.py +5 -1
- fsspec/core.py +6 -3
- fsspec/fuse.py +4 -4
- fsspec/gui.py +4 -2
- fsspec/implementations/arrow.py +1 -1
- fsspec/implementations/dbfs.py +21 -21
- fsspec/implementations/dirfs.py +19 -1
- fsspec/implementations/ftp.py +23 -13
- fsspec/implementations/git.py +27 -39
- fsspec/implementations/http.py +13 -11
- fsspec/implementations/local.py +11 -7
- fsspec/implementations/memory.py +13 -9
- fsspec/implementations/reference.py +77 -47
- fsspec/implementations/smb.py +78 -5
- fsspec/implementations/webhdfs.py +2 -2
- fsspec/implementations/zip.py +44 -1
- fsspec/mapping.py +4 -4
- fsspec/spec.py +4 -6
- fsspec/tests/abstract/__init__.py +3 -3
- fsspec/utils.py +1 -4
- {fsspec-2024.6.1.dist-info → fsspec-2024.10.0.dist-info}/METADATA +1 -1
- {fsspec-2024.6.1.dist-info → fsspec-2024.10.0.dist-info}/RECORD +25 -25
- {fsspec-2024.6.1.dist-info → fsspec-2024.10.0.dist-info}/WHEEL +0 -0
- {fsspec-2024.6.1.dist-info → fsspec-2024.10.0.dist-info}/licenses/LICENSE +0 -0
fsspec/_version.py
CHANGED

@@ -12,5 +12,5 @@ __version__: str
 __version_tuple__: VERSION_TUPLE
 version_tuple: VERSION_TUPLE
 
-__version__ = version = '2024.6.1'
-__version_tuple__ = version_tuple = (2024, 6, 1)
+__version__ = version = '2024.10.0'
+__version_tuple__ = version_tuple = (2024, 10, 0)
fsspec/asyn.py
CHANGED

@@ -344,6 +344,10 @@ class AsyncFileSystem(AbstractFileSystem):
     async def _cp_file(self, path1, path2, **kwargs):
         raise NotImplementedError
 
+    async def _mv_file(self, path1, path2):
+        await self._cp_file(path1, path2)
+        await self._rm_file(path1)
+
     async def _copy(
         self,
         path1,
@@ -1072,7 +1076,7 @@ class AbstractAsyncStreamedFile(AbstractBufferedFile):
         self.offset = 0
         try:
             await self._initiate_upload()
-        except:
+        except:
             self.closed = True
             raise
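
The new `_mv_file` default means any async implementation that can copy and delete gets a working move for free. A minimal, self-contained sketch of that copy-then-delete pattern (the `InMemoryAsyncFS` class and its `store` dict are hypothetical stand-ins, not fsspec code):

import asyncio


class InMemoryAsyncFS:
    def __init__(self):
        self.store = {}

    async def _cp_file(self, path1, path2):
        self.store[path2] = self.store[path1]

    async def _rm_file(self, path):
        del self.store[path]

    async def _mv_file(self, path1, path2):
        # same shape as the new default: copy first, then remove the source
        await self._cp_file(path1, path2)
        await self._rm_file(path1)


async def main():
    fs = InMemoryAsyncFS()
    fs.store["/a"] = b"payload"
    await fs._mv_file("/a", "/b")
    print(fs.store)  # {'/b': b'payload'}


asyncio.run(main())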
fsspec/core.py
CHANGED

@@ -346,7 +346,10 @@ def _un_chain(path, kwargs):
         kws = kwargs.pop(protocol, {})
         if bit is bits[0]:
             kws.update(kwargs)
-        kw = dict(**extra_kwargs, **kws)
+        kw = dict(
+            **{k: v for k, v in extra_kwargs.items() if k not in kws or v != kws[k]},
+            **kws,
+        )
         bit = cls._strip_protocol(bit)
         if (
             protocol in {"blockcache", "filecache", "simplecache"}
@@ -578,7 +581,7 @@ def expand_paths_if_needed(paths, mode, num, fs, name_function):
     paths = list(paths)
 
     if "w" in mode:  # read mode
-        if sum([1 for p in paths if "*" in p]) > 1:
+        if sum(1 for p in paths if "*" in p) > 1:
             raise ValueError(
                 "When writing data, only one filename mask can be specified."
             )
@@ -639,7 +642,7 @@ def get_fs_token_paths(
     if isinstance(urlpath, (list, tuple, set)):
         if not urlpath:
             raise ValueError("empty urlpath sequence")
-        urlpath0 = stringify_path(list(urlpath)[0])
+        urlpath0 = stringify_path(next(iter(urlpath)))
     else:
         urlpath0 = stringify_path(urlpath)
     storage_options = storage_options or {}
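
The `_un_chain` change is a deduplication step: entries of `extra_kwargs` that `kws` already holds with the same value are filtered out before the two mappings are merged, so `dict(**..., **kws)` never sees the same keyword twice. A standalone illustration with made-up option names:

extra_kwargs = {"anon": True, "blocksize": 4096}  # inherited from the chain
kws = {"blocksize": 4096}                         # explicit for this protocol

kw = dict(
    **{k: v for k, v in extra_kwargs.items() if k not in kws or v != kws[k]},
    **kws,
)
print(kw)  # {'anon': True, 'blocksize': 4096}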
fsspec/fuse.py
CHANGED

@@ -31,8 +31,8 @@ class FUSEr(Operations):
         path = "".join([self.root, path.lstrip("/")]).rstrip("/")
         try:
             info = self.fs.info(path)
-        except FileNotFoundError:
-            raise FuseOSError(ENOENT)
+        except FileNotFoundError as exc:
+            raise FuseOSError(ENOENT) from exc
 
         data = {"st_uid": info.get("uid", 1000), "st_gid": info.get("gid", 1000)}
         perm = info.get("mode", 0o777)
@@ -119,8 +119,8 @@ class FUSEr(Operations):
         fn = "".join([self.root, path.lstrip("/")])
         try:
             self.fs.rm(fn, False)
-        except (OSError, FileNotFoundError):
-            raise FuseOSError(EIO)
+        except (OSError, FileNotFoundError) as exc:
+            raise FuseOSError(EIO) from exc
 
     def release(self, path, fh):
         try:
fsspec/gui.py
CHANGED

@@ -93,8 +93,10 @@ class SigSlot:
         """Display in a notebook or a server"""
         try:
             return self.panel._repr_mimebundle_(*args, **kwargs)
-        except (ValueError, AttributeError):
-            raise NotImplementedError("Panel does not seem to be set up properly")
+        except (ValueError, AttributeError) as exc:
+            raise NotImplementedError(
+                "Panel does not seem to be set up properly"
+            ) from exc
 
     def connect(self, signal, slot):
         """Associate call back with given event
fsspec/implementations/arrow.py
CHANGED

@@ -128,7 +128,7 @@ class ArrowFSWrapper(AbstractFileSystem):
             with self.open(tmp_fname, "wb") as rstream:
                 shutil.copyfileobj(lstream, rstream)
             self.fs.move(tmp_fname, path2)
-        except BaseException:
+        except BaseException:
             with suppress(FileNotFoundError):
                 self.fs.delete_file(tmp_fname)
             raise
fsspec/implementations/dbfs.py
CHANGED

@@ -77,9 +77,9 @@ class DatabricksFileSystem(AbstractFileSystem):
             )
         except DatabricksException as e:
             if e.error_code == "RESOURCE_DOES_NOT_EXIST":
-                raise FileNotFoundError(e.message)
+                raise FileNotFoundError(e.message) from e
 
-            raise
+            raise
         files = r["files"]
         out = [
             {
@@ -123,9 +123,9 @@ class DatabricksFileSystem(AbstractFileSystem):
             self._send_to_api(method="post", endpoint="mkdirs", json={"path": path})
         except DatabricksException as e:
             if e.error_code == "RESOURCE_ALREADY_EXISTS":
-                raise FileExistsError(e.message)
+                raise FileExistsError(e.message) from e
 
-            raise
+            raise
         self.invalidate_cache(self._parent(path))
 
     def mkdir(self, path, create_parents=True, **kwargs):
@@ -169,9 +169,9 @@ class DatabricksFileSystem(AbstractFileSystem):
                 self.rm(path=path, recursive=recursive)
             elif e.error_code == "IO_ERROR":
                 # Using the same exception as the os module would use here
-                raise OSError(e.message)
+                raise OSError(e.message) from e
 
-            raise
+            raise
         self.invalidate_cache(self._parent(path))
 
     def mv(
@@ -212,11 +212,11 @@ class DatabricksFileSystem(AbstractFileSystem):
             )
         except DatabricksException as e:
             if e.error_code == "RESOURCE_DOES_NOT_EXIST":
-                raise FileNotFoundError(e.message)
+                raise FileNotFoundError(e.message) from e
             elif e.error_code == "RESOURCE_ALREADY_EXISTS":
-                raise FileExistsError(e.message)
+                raise FileExistsError(e.message) from e
 
-            raise
+            raise
         self.invalidate_cache(self._parent(source_path))
         self.invalidate_cache(self._parent(destination_path))
 
@@ -264,9 +264,9 @@ class DatabricksFileSystem(AbstractFileSystem):
             try:
                 exception_json = e.response.json()
             except Exception:
-                raise e
+                raise e from None
 
-            raise DatabricksException(**exception_json)
+            raise DatabricksException(**exception_json) from e
 
         return r.json()
 
@@ -297,9 +297,9 @@ class DatabricksFileSystem(AbstractFileSystem):
             return r["handle"]
         except DatabricksException as e:
             if e.error_code == "RESOURCE_ALREADY_EXISTS":
-                raise FileExistsError(e.message)
+                raise FileExistsError(e.message) from e
 
-            raise
+            raise
 
     def _close_handle(self, handle):
         """
@@ -314,9 +314,9 @@ class DatabricksFileSystem(AbstractFileSystem):
             self._send_to_api(method="post", endpoint="close", json={"handle": handle})
         except DatabricksException as e:
             if e.error_code == "RESOURCE_DOES_NOT_EXIST":
-                raise FileNotFoundError(e.message)
+                raise FileNotFoundError(e.message) from e
 
-            raise
+            raise
 
     def _add_data(self, handle, data):
         """
@@ -342,11 +342,11 @@ class DatabricksFileSystem(AbstractFileSystem):
             )
         except DatabricksException as e:
             if e.error_code == "RESOURCE_DOES_NOT_EXIST":
-                raise FileNotFoundError(e.message)
+                raise FileNotFoundError(e.message) from e
             elif e.error_code == "MAX_BLOCK_SIZE_EXCEEDED":
-                raise ValueError(e.message)
+                raise ValueError(e.message) from e
 
-            raise
+            raise
 
     def _get_data(self, path, start, end):
         """
@@ -372,11 +372,11 @@ class DatabricksFileSystem(AbstractFileSystem):
             return base64.b64decode(r["data"])
         except DatabricksException as e:
             if e.error_code == "RESOURCE_DOES_NOT_EXIST":
-                raise FileNotFoundError(e.message)
+                raise FileNotFoundError(e.message) from e
             elif e.error_code in ["INVALID_PARAMETER_VALUE", "MAX_READ_SIZE_EXCEEDED"]:
-                raise ValueError(e.message)
+                raise ValueError(e.message) from e
 
-            raise
+            raise
 
     def invalidate_cache(self, path=None):
         if path is None:
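
All of the dbfs.py edits apply one pattern, `raise ... from e`, which chains the translated error to the original one. A standalone demonstration (the `DatabricksException` class here is a simplified stand-in for the real one in fsspec.implementations.dbfs):

class DatabricksException(Exception):
    def __init__(self, error_code, message):
        super().__init__(message)
        self.error_code = error_code
        self.message = message


def ls():
    try:
        raise DatabricksException("RESOURCE_DOES_NOT_EXIST", "no such path")
    except DatabricksException as e:
        if e.error_code == "RESOURCE_DOES_NOT_EXIST":
            # chaining keeps the API error available on the new exception
            raise FileNotFoundError(e.message) from e
        raise


try:
    ls()
except FileNotFoundError as err:
    print(type(err.__cause__).__name__)  # DatabricksException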
fsspec/implementations/dirfs.py
CHANGED

@@ -64,9 +64,15 @@ class DirFileSystem(AsyncFileSystem):
         if isinstance(path, str):
             if not self.path:
                 return path
-            if path == self.path:
+            # We need to account for S3FileSystem returning paths that do not
+            # start with a '/'
+            if path == self.path or (
+                self.path.startswith(self.fs.sep) and path == self.path[1:]
+            ):
                 return ""
             prefix = self.path + self.fs.sep
+            if self.path.startswith(self.fs.sep) and not path.startswith(self.fs.sep):
+                prefix = prefix[1:]
             assert path.startswith(prefix)
             return path[len(prefix) :]
         return [self._relpath(_path) for _path in path]
@@ -364,3 +370,15 @@ class DirFileSystem(AsyncFileSystem):
             *args,
             **kwargs,
         )
+
+    async def open_async(
+        self,
+        path,
+        *args,
+        **kwargs,
+    ):
+        return await self.fs.open_async(
+            self._join(path),
+            *args,
+            **kwargs,
+        )
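
The `_relpath` change handles backends (such as S3) that hand back paths without the leading slash the root was configured with. A stripped-down, runnable rendition of the same logic (the helper name, `sep` default, and sample paths are illustrative, not taken from the diff):

def relpath(path, root, sep="/"):
    if not root:
        return path
    if path == root or (root.startswith(sep) and path == root[1:]):
        return ""
    prefix = root + sep
    if root.startswith(sep) and not path.startswith(sep):
        # the backend dropped the leading slash, so drop it from the prefix too
        prefix = prefix[1:]
    assert path.startswith(prefix)
    return path[len(prefix):]


print(relpath("/data/a.csv", "/data"))  # a.csv
print(relpath("data/a.csv", "/data"))   # a.csv  (bucket-style path, no slash)
print(relpath("data", "/data"))         # ''     (the root itself)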
fsspec/implementations/ftp.py
CHANGED

@@ -2,7 +2,7 @@ import os
 import sys
 import uuid
 import warnings
-from ftplib import FTP, Error, error_perm
+from ftplib import FTP, FTP_TLS, Error, error_perm
 from typing import Any
 
 from ..spec import AbstractBufferedFile, AbstractFileSystem
@@ -27,6 +27,7 @@ class FTPFileSystem(AbstractFileSystem):
         tempdir=None,
         timeout=30,
         encoding="utf-8",
+        tls=False,
         **kwargs,
     ):
         """
@@ -56,28 +57,37 @@ class FTPFileSystem(AbstractFileSystem):
             Timeout of the ftp connection in seconds
         encoding: str
             Encoding to use for directories and filenames in FTP connection
+        tls: bool
+            Use FTP-TLS, by default False
         """
         super().__init__(**kwargs)
         self.host = host
         self.port = port
         self.tempdir = tempdir or "/tmp"
-        self.cred = username, password, acct
+        self.cred = username or "", password or "", acct or ""
         self.timeout = timeout
        self.encoding = encoding
         if block_size is not None:
             self.blocksize = block_size
         else:
             self.blocksize = 2**16
+        self.tls = tls
         self._connect()
+        if self.tls:
+            self.ftp.prot_p()
 
     def _connect(self):
+        if self.tls:
+            ftp_cls = FTP_TLS
+        else:
+            ftp_cls = FTP
         if sys.version_info >= (3, 9):
-            self.ftp = FTP(timeout=self.timeout, encoding=self.encoding)
+            self.ftp = ftp_cls(timeout=self.timeout, encoding=self.encoding)
         elif self.encoding:
             warnings.warn("`encoding` not supported for python<3.9, ignoring")
-            self.ftp = FTP(timeout=self.timeout)
+            self.ftp = ftp_cls(timeout=self.timeout)
         else:
-            self.ftp = FTP(timeout=self.timeout)
+            self.ftp = ftp_cls(timeout=self.timeout)
         self.ftp.connect(self.host, self.port)
         self.ftp.login(*self.cred)
 
@@ -107,9 +117,9 @@ class FTPFileSystem(AbstractFileSystem):
                 except error_perm:
                     out = _mlsd2(self.ftp, path)  # Not platform independent
                 for fn, details in out:
-                    if path == "/":
-                        path = ""  # just for forming the names, below
-                    details["name"] = "/".join([path, fn.lstrip("/")])
+                    details["name"] = "/".join(
+                        ["" if path == "/" else path, fn.lstrip("/")]
+                    )
                     if details["type"] == "file":
                         details["size"] = int(details["size"])
                     else:
@@ -122,8 +132,8 @@ class FTPFileSystem(AbstractFileSystem):
                 info = self.info(path)
                 if info["type"] == "file":
                     out = [(path, info)]
-            except (Error, IndexError):
-                raise FileNotFoundError(path)
+            except (Error, IndexError) as exc:
+                raise FileNotFoundError(path) from exc
         files = self.dircache.get(path, out)
         if not detail:
             return sorted([fn for fn, details in files])
@@ -137,9 +147,9 @@ class FTPFileSystem(AbstractFileSystem):
             return {"name": "/", "size": 0, "type": "directory"}
         files = self.ls(self._parent(path).lstrip("/"), True)
         try:
-            out = [f for f in files if f["name"] == path][0]
-        except IndexError:
-            raise FileNotFoundError(path)
+            out = next(f for f in files if f["name"] == path)
+        except StopIteration as exc:
+            raise FileNotFoundError(path) from exc
         return out
 
     def get_file(self, rpath, lpath, **kwargs):
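
A usage sketch for the new `tls` flag. The host and credentials below are placeholders; with `tls=True` the filesystem connects via ftplib.FTP_TLS and switches the data channel to a protected connection (`prot_p`) after login:

import fsspec

fs = fsspec.filesystem(
    "ftp",
    host="ftp.example.com",  # placeholder server
    username="user",         # placeholder credentials
    password="secret",
    tls=True,                # new in 2024.10.0
)
print(fs.ls("/"))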
fsspec/implementations/git.py
CHANGED

@@ -55,6 +55,8 @@ class GitFileSystem(AbstractFileSystem):
         tree = comm.tree
         for part in parts:
             if part and isinstance(tree, pygit2.Tree):
+                if part not in tree:
+                    raise FileNotFoundError(path)
                 tree = tree[part]
         return tree
 
@@ -69,46 +71,32 @@ class GitFileSystem(AbstractFileSystem):
             out["ref"], path = path.split("@", 1)
         return out
 
+    @staticmethod
+    def _object_to_info(obj, path=None):
+        # obj.name and obj.filemode are None for the root tree!
+        is_dir = isinstance(obj, pygit2.Tree)
+        return {
+            "type": "directory" if is_dir else "file",
+            "name": (
+                "/".join([path, obj.name or ""]).lstrip("/") if path else obj.name
+            ),
+            "hex": str(obj.id),
+            "mode": "100644" if obj.filemode is None else f"{obj.filemode:o}",
+            "size": 0 if is_dir else obj.size,
+        }
+
     def ls(self, path, detail=True, ref=None, **kwargs):
-        path = self._strip_protocol(path)
-        tree = self._path_to_object(path, ref)
-        if isinstance(tree, pygit2.Tree):
-            out = []
-            for obj in tree:
-                if isinstance(obj, pygit2.Tree):
-                    out.append(
-                        {
-                            "type": "directory",
-                            "name": "/".join([path, obj.name]).lstrip("/"),
-                            "hex": obj.hex,
-                            "mode": f"{obj.filemode:o}",
-                            "size": 0,
-                        }
-                    )
-                else:
-                    out.append(
-                        {
-                            "type": "file",
-                            "name": "/".join([path, obj.name]).lstrip("/"),
-                            "hex": obj.hex,
-                            "mode": f"{obj.filemode:o}",
-                            "size": obj.size,
-                        }
-                    )
-        else:
-            obj = tree
-            out = [
-                {
-                    "type": "file",
-                    "name": obj.name,
-                    "hex": obj.hex,
-                    "mode": f"{obj.filemode:o}",
-                    "size": obj.size,
-                }
-            ]
-        if detail:
-            return out
-        return [o["name"] for o in out]
+        tree = self._path_to_object(self._strip_protocol(path), ref)
+        return [
+            GitFileSystem._object_to_info(obj, path)
+            if detail
+            else GitFileSystem._object_to_info(obj, path)["name"]
+            for obj in (tree if isinstance(tree, pygit2.Tree) else [tree])
+        ]
+
+    def info(self, path, ref=None, **kwargs):
+        tree = self._path_to_object(self._strip_protocol(path), ref)
+        return GitFileSystem._object_to_info(tree, path)
 
     def ukey(self, path, ref=None):
         return self.info(path, ref=ref)["hex"]
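
The refactor funnels both `ls` and the new `info` through a single `_object_to_info` helper. Its output shape, reproduced with a fake object so it runs without pygit2 (a real blob carries filemode 0o100644 and a size; the root tree has filemode None):

from types import SimpleNamespace

def object_to_info(obj, path=None, is_dir=False):
    return {
        "type": "directory" if is_dir else "file",
        "name": ("/".join([path, obj.name or ""]).lstrip("/") if path else obj.name),
        "hex": str(obj.id),
        "mode": "100644" if obj.filemode is None else f"{obj.filemode:o}",
        "size": 0 if is_dir else obj.size,
    }

blob = SimpleNamespace(name="README.md", id="abc123", filemode=0o100644, size=42)
print(object_to_info(blob, path="docs"))
# {'type': 'file', 'name': 'docs/README.md', 'hex': 'abc123', 'mode': '100644', 'size': 42}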
fsspec/implementations/http.py
CHANGED

@@ -254,7 +254,7 @@ class HTTPFileSystem(AsyncFileSystem):
         if isfilelike(lpath):
             outfile = lpath
         else:
-            outfile = open(lpath, "wb")  # noqa: ASYNC101
+            outfile = open(lpath, "wb")  # noqa: ASYNC101, ASYNC230
 
         try:
             chunk = True
@@ -282,7 +282,7 @@ class HTTPFileSystem(AsyncFileSystem):
             context = nullcontext(lpath)
             use_seek = False  # might not support seeking
         else:
-            context = open(lpath, "rb")  # noqa: ASYNC101
+            context = open(lpath, "rb")  # noqa: ASYNC101, ASYNC230
             use_seek = True
 
         with context as f:
@@ -358,9 +358,10 @@ class HTTPFileSystem(AsyncFileSystem):
         kw = self.kwargs.copy()
         kw["asynchronous"] = self.asynchronous
         kw.update(kwargs)
-        size = size or self.info(path, **kwargs)["size"]
+        info = {}
+        size = size or info.update(self.info(path, **kwargs)) or info["size"]
         session = sync(self.loop, self.set_session)
-        if block_size and size:
+        if block_size and size and info.get("partial", True):
             return HTTPFile(
                 self,
                 path,
@@ -520,9 +521,9 @@ class HTTPFileSystem(AsyncFileSystem):
 
 class HTTPFile(AbstractBufferedFile):
     """
-    A file-like object pointing to a
+    A file-like object pointing to a remote HTTP(S) resource
 
-    Supports only reading, with read-ahead of a
+    Supports only reading, with read-ahead of a predetermined block-size.
 
     In the case that the server does not supply the filesize, only reading of
     the complete file in one go is supported.
@@ -805,7 +806,7 @@ async def get_range(session, url, start, end, file=None, **kwargs):
     async with r:
         out = await r.read()
         if file:
-            with open(file, "r+b") as f:  # noqa: ASYNC101
+            with open(file, "r+b") as f:  # noqa: ASYNC101, ASYNC230
                 f.seek(start)
                 f.write(out)
         else:
@@ -835,10 +836,6 @@ async def _file_info(url, session, size_policy="head", **kwargs):
     async with r:
         r.raise_for_status()
 
-        # TODO:
-        # recognise lack of 'Accept-Ranges',
-        # or 'Accept-Ranges': 'none' (not 'bytes')
-        # to mean streaming only, no random access => return None
         if "Content-Length" in r.headers:
             # Some servers may choose to ignore Accept-Encoding and return
             # compressed content, in which case the returned size is unreliable.
@@ -853,6 +850,11 @@ async def _file_info(url, session, size_policy="head", **kwargs):
         if "Content-Type" in r.headers:
             info["mimetype"] = r.headers["Content-Type"].partition(";")[0]
 
+        if r.headers.get("Accept-Ranges") == "none":
+            # Some servers may explicitly discourage partial content requests, but
+            # the lack of "Accept-Ranges" does not always indicate they would fail
+            info["partial"] = False
+
        info["url"] = str(r.url)
 
         for checksum_field in ["ETag", "Content-MD5", "Digest"]:
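
The one-liner `size = size or info.update(...) or info["size"]` works because dict.update returns None: when `size` is falsy the chain falls through to the freshly fetched `info["size"]`, and the populated `info` dict is then available for the new `partial` check. A stub-based demonstration (the `fake_info` helper is hypothetical):

def fake_info(path):
    return {"name": path, "size": 1234, "partial": False}

def resolve_size(path, size=None):
    info = {}
    size = size or info.update(fake_info(path)) or info["size"]
    # random access is only attempted when the server did not opt out
    allow_random_access = bool(size) and info.get("partial", True)
    return size, allow_random_access

print(resolve_size("http://example.com/f"))      # (1234, False)
print(resolve_size("http://example.com/f", 10))  # (10, True): info never fetched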
fsspec/implementations/local.py
CHANGED

@@ -79,6 +79,14 @@ class LocalFileSystem(AbstractFileSystem):
                 t = "file"
             else:
                 t = "other"
+
+            size = out.st_size
+            if link:
+                try:
+                    out2 = path.stat(follow_symlinks=True)
+                    size = out2.st_size
+                except OSError:
+                    size = 0
             path = self._strip_protocol(path.path)
         else:
             # str or path-like
@@ -87,6 +95,7 @@ class LocalFileSystem(AbstractFileSystem):
             link = stat.S_ISLNK(out.st_mode)
             if link:
                 out = os.stat(path, follow_symlinks=True)
+            size = out.st_size
             if stat.S_ISDIR(out.st_mode):
                 t = "directory"
             elif stat.S_ISREG(out.st_mode):
@@ -95,20 +104,15 @@ class LocalFileSystem(AbstractFileSystem):
                 t = "other"
         result = {
             "name": path,
-            "size": out.st_size,
+            "size": size,
             "type": t,
             "created": out.st_ctime,
             "islink": link,
         }
         for field in ["mode", "uid", "gid", "mtime", "ino", "nlink"]:
             result[field] = getattr(out, f"st_{field}")
-        if link:
+        if link:
             result["destination"] = os.readlink(path)
-            try:
-                out2 = os.stat(path, follow_symlinks=True)
-                result["size"] = out2.st_size
-            except OSError:
-                result["size"] = 0
         return result
 
     def lexists(self, path, **kwargs):
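
What the local.py change implements, shown against the os module directly: for a symlink the reported size should be the target's size (follow_symlinks=True), falling back to 0 if the link is broken. POSIX-style sketch; creating symlinks may require privileges on Windows:

import os
import tempfile

with tempfile.TemporaryDirectory() as d:
    target = os.path.join(d, "target.bin")
    link = os.path.join(d, "link")
    with open(target, "wb") as f:
        f.write(b"x" * 100)
    os.symlink(target, link)

    out = os.stat(link, follow_symlinks=False)
    size = out.st_size  # size of the link itself
    if os.path.islink(link):
        try:
            size = os.stat(link, follow_symlinks=True).st_size
        except OSError:  # broken link
            size = 0
    print(size)  # 100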
fsspec/implementations/memory.py
CHANGED

@@ -224,8 +224,8 @@ class MemoryFileSystem(AbstractFileSystem):
         path = self._strip_protocol(path)
         try:
             return bytes(self.store[path].getbuffer()[start:end])
-        except KeyError:
-            raise FileNotFoundError(path)
+        except KeyError as e:
+            raise FileNotFoundError(path) from e
 
     def _rm(self, path):
         path = self._strip_protocol(path)
@@ -238,15 +238,19 @@ class MemoryFileSystem(AbstractFileSystem):
         path = self._strip_protocol(path)
         try:
             return self.store[path].modified
-        except KeyError:
-            raise FileNotFoundError(path)
+        except KeyError as e:
+            raise FileNotFoundError(path) from e
 
     def created(self, path):
         path = self._strip_protocol(path)
         try:
             return self.store[path].created
-        except KeyError:
-            raise FileNotFoundError(path)
+        except KeyError as e:
+            raise FileNotFoundError(path) from e
+
+    def isfile(self, path):
+        path = self._strip_protocol(path)
+        return path in self.store
 
     def rm(self, path, recursive=False, maxdepth=None):
         if isinstance(path, str):
@@ -255,14 +259,14 @@ class MemoryFileSystem(AbstractFileSystem):
             path = [self._strip_protocol(p) for p in path]
         paths = self.expand_path(path, recursive=recursive, maxdepth=maxdepth)
         for p in reversed(paths):
+            if self.isfile(p):
+                self.rm_file(p)
             # If the expanded path doesn't exist, it is only because the expanded
             # path was a directory that does not exist in self.pseudo_dirs. This
            # is possible if you directly create files without making the
             # directories first.
-            if not self.exists(p):
+            elif not self.exists(p):
                 continue
-            if self.isfile(p):
-                self.rm_file(p)
             else:
                 self.rmdir(p)
 
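
The reordered `rm` checks files first, so recursively removing a tree whose directories only exist implicitly (no mkdir calls) works through the public API. A quick check, assuming fsspec 2024.10.0 behavior:

import fsspec

fs = fsspec.filesystem("memory")
fs.pipe("/deep/nested/file.txt", b"data")  # parents never created explicitly
fs.rm("/deep", recursive=True)
print(fs.exists("/deep/nested/file.txt"))  # False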
fsspec/implementations/reference.py
CHANGED

@@ -5,8 +5,9 @@ import itertools
 import logging
 import math
 import os
+from itertools import chain
 from functools import lru_cache
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Literal
 
 import fsspec.core
 
@@ -16,10 +17,10 @@ except ImportError:
     if not TYPE_CHECKING:
         import json
 
-from ..asyn import AsyncFileSystem
-from ..callbacks import DEFAULT_CALLBACK
-from ..core import filesystem, open, split_protocol
-from ..utils import isfilelike, merge_offset_ranges, other_paths
+from fsspec.asyn import AsyncFileSystem
+from fsspec.callbacks import DEFAULT_CALLBACK
+from fsspec.core import filesystem, open, split_protocol
+from fsspec.utils import isfilelike, merge_offset_ranges, other_paths
 
 logger = logging.getLogger("fsspec.reference")
 
@@ -35,7 +36,7 @@ class ReferenceNotReachable(RuntimeError):
 
 
 def _first(d):
-    return list(d.values())[0]
+    return next(iter(d.values()))
 
 
 def _prot_in_references(path, references):
@@ -103,7 +104,13 @@ class LazyReferenceMapper(collections.abc.MutableMapping):
         return pd
 
     def __init__(
-        self, root, fs=None, out_root=None, cache_size=128, categorical_threshold=10
+        self,
+        root,
+        fs=None,
+        out_root=None,
+        cache_size=128,
+        categorical_threshold=10,
+        engine: Literal["fastparquet", "pyarrow"] = "fastparquet",
     ):
         """
 
@@ -125,17 +132,25 @@ class LazyReferenceMapper(collections.abc.MutableMapping):
             Encode urls as pandas.Categorical to reduce memory footprint if the ratio
             of the number of unique urls to total number of refs for each variable
             is greater than or equal to this number. (default 10)
+        engine: Literal["fastparquet","pyarrow"]
+            Engine choice for reading parquet files. (default is "fastparquet")
         """
+
         self.root = root
         self.chunk_sizes = {}
         self.out_root = out_root or self.root
         self.cat_thresh = categorical_threshold
+        self.engine = engine
         self.cache_size = cache_size
-        self.dirs = None
         self.url = self.root + "/{field}/refs.{record}.parq"
         # TODO: derive fs from `root`
         self.fs = fsspec.filesystem("file") if fs is None else fs
 
+        from importlib.util import find_spec
+
+        if self.engine == "pyarrow" and find_spec("pyarrow") is None:
+            raise ImportError("engine choice `pyarrow` is not installed.")
+
     def __getattr__(self, item):
         if item in ("_items", "record_size", "zmetadata"):
             self.setup()
@@ -158,8 +173,8 @@ class LazyReferenceMapper(collections.abc.MutableMapping):
             """cached parquet file loader"""
             path = self.url.format(field=field, record=record)
             data = io.BytesIO(self.fs.cat_file(path))
-            df = self.pd.read_parquet(data, engine="fastparquet")
-            refs = {c: df[c].values for c in df.columns}
+            df = self.pd.read_parquet(data, engine=self.engine)
+            refs = {c: df[c].to_numpy() for c in df.columns}
             return refs
 
         self.open_refs = open_refs
@@ -195,32 +210,36 @@ class LazyReferenceMapper(collections.abc.MutableMapping):
         fs.pipe("/".join([root, ".zmetadata"]), json.dumps(met).encode())
         return LazyReferenceMapper(root, fs, **kwargs)
 
-    def listdir(self, basename=True):
+    @lru_cache()
+    def listdir(self):
         """List top-level directories"""
-        # cache me?
-        if self.dirs is None:
-            dirs = [p.split("/", 1)[0] for p in self.zmetadata]
-            self.dirs = {p for p in dirs if p and not p.startswith(".")}
-        listing = self.dirs
-        if basename:
-            listing = [os.path.basename(path) for path in listing]
-        return listing
+        dirs = (p.rsplit("/", 1)[0] for p in self.zmetadata if not p.startswith(".z"))
+        return set(dirs)
 
     def ls(self, path="", detail=True):
         """Shortcut file listings"""
-        if not path:
-            dirnames = self.listdir()
-            others = set(
-                [".zmetadata"]
-                + [name for name in self.zmetadata if "/" not in name]
-                + [name for name in self._items if "/" not in name]
-            )
+        path = path.rstrip("/")
+        pathdash = path + "/" if path else ""
+        dirnames = self.listdir()
+        dirs = [
+            d
+            for d in dirnames
+            if d.startswith(pathdash) and "/" not in d.lstrip(pathdash)
+        ]
+        if dirs:
+            others = {
+                f
+                for f in chain(
+                    [".zmetadata"],
+                    (name for name in self.zmetadata),
+                    (name for name in self._items),
+                )
+                if f.startswith(pathdash) and "/" not in f.lstrip(pathdash)
+            }
             if detail is False:
-                others.update(dirnames)
+                others.update(dirs)
                 return sorted(others)
-            dirinfo = [
-                {"name": name, "type": "directory", "size": 0} for name in dirnames
-            ]
+            dirinfo = [{"name": name, "type": "directory", "size": 0} for name in dirs]
             fileinfo = [
                 {
                     "name": name,
@@ -234,10 +253,7 @@ class LazyReferenceMapper(collections.abc.MutableMapping):
                 for name in others
             ]
             return sorted(dirinfo + fileinfo, key=lambda s: s["name"])
-        parts = path.split("/", 1)
-        if len(parts) > 1:
-            raise FileNotFoundError("Cannot list within directories right now")
-        field = parts[0]
+        field = path
         others = set(
             [name for name in self.zmetadata if name.startswith(f"{path}/")]
             + [name for name in self._items if name.startswith(f"{path}/")]
@@ -291,8 +307,8 @@ class LazyReferenceMapper(collections.abc.MutableMapping):
         # Chunk keys can be loaded from row group and cached in LRU cache
         try:
             refs = self.open_refs(field, record)
-        except (ValueError, TypeError, FileNotFoundError):
-            raise KeyError(key)
+        except (ValueError, TypeError, FileNotFoundError) as exc:
+            raise KeyError(key) from exc
         columns = ["path", "offset", "size", "raw"]
         selection = [refs[c][ri] if c in refs else None for c in columns]
         raw = selection[-1]
@@ -462,18 +478,28 @@ class LazyReferenceMapper(collections.abc.MutableMapping):
 
         fn = f"{base_url or self.out_root}/{field}/refs.{record}.parq"
         self.fs.mkdirs(f"{base_url or self.out_root}/{field}", exist_ok=True)
+
+        if self.engine == "pyarrow":
+            df_backend_kwargs = {"write_statistics": False}
+        elif self.engine == "fastparquet":
+            df_backend_kwargs = {
+                "stats": False,
+                "object_encoding": object_encoding,
+                "has_nulls": has_nulls,
+            }
+        else:
+            raise NotImplementedError(f"{self.engine} not supported")
+
         df.to_parquet(
             fn,
-            engine="fastparquet",
+            engine=self.engine,
             storage_options=storage_options
             or getattr(self.fs, "storage_options", None),
             compression="zstd",
             index=False,
-            stats=False,
-            object_encoding=object_encoding,
-            has_nulls=has_nulls,
-            # **kwargs,
+            **df_backend_kwargs,
         )
+
         partition.clear()
         self._items.pop((field, record))
 
@@ -485,6 +511,7 @@ class LazyReferenceMapper(collections.abc.MutableMapping):
         base_url: str
             Location of the output
         """
+
         # write what we have so far and clear sub chunks
         for thing in list(self._items):
             if isinstance(thing, tuple):
@@ -501,6 +528,7 @@ class LazyReferenceMapper(collections.abc.MutableMapping):
             if k != ".zmetadata" and ".z" in k:
                 self.zmetadata[k] = json.loads(self._items.pop(k))
         met = {"metadata": self.zmetadata, "record_size": self.record_size}
+        self._items.clear()
        self._items[".zmetadata"] = json.dumps(met).encode()
         self.fs.pipe(
             "/".join([base_url or self.out_root, ".zmetadata"]),
@@ -732,8 +760,8 @@ class ReferenceFileSystem(AsyncFileSystem):
         logger.debug(f"cat: {path}")
         try:
             part = self.references[path]
-        except KeyError:
-            raise FileNotFoundError(path)
+        except KeyError as exc:
+            raise FileNotFoundError(path) from exc
         if isinstance(part, str):
             part = part.encode()
         if isinstance(part, bytes):
@@ -995,9 +1023,11 @@ class ReferenceFileSystem(AsyncFileSystem):
         out = {}
         for gen in gens:
             dimension = {
-                k: v
-                if isinstance(v, list)
-                else range(v.get("start", 0), v["stop"], v.get("step", 1))
+                k: (
+                    v
+                    if isinstance(v, list)
+                    else range(v.get("start", 0), v["stop"], v.get("step", 1))
+                )
                 for k, v in gen["dimensions"].items()
             }
             products = (
@@ -1084,7 +1114,7 @@ class ReferenceFileSystem(AsyncFileSystem):
         if self.dircache:
             return path in self.dircache
         elif isinstance(self.references, LazyReferenceMapper):
-            return path in self.references.listdir("")
+            return path in self.references.listdir()
         else:
             # this may be faster than building dircache for single calls, but
             # by looping will be slow for many calls; could cache it?
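
A minimal sketch of the new `engine` parameter, using only the constructor signature shown in the diff above. Nothing is read here (setup is lazy), and with engine="pyarrow" the constructor raises ImportError when pyarrow is absent:

import fsspec
from fsspec.implementations.reference import LazyReferenceMapper

fs = fsspec.filesystem("memory")
mapper = LazyReferenceMapper("/refs", fs=fs, engine="fastparquet")
print(mapper.engine)  # fastparquet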
fsspec/implementations/smb.py
CHANGED

@@ -4,10 +4,12 @@ Windows Samba network shares by using package smbprotocol
 """
 
 import datetime
+import re
 import uuid
 from stat import S_ISDIR, S_ISLNK
 
 import smbclient
+import smbprotocol.exceptions
 
 from .. import AbstractFileSystem
 from ..utils import infer_storage_options
@@ -67,7 +69,9 @@ class SMBFileSystem(AbstractFileSystem):
         timeout=60,
         encrypt=None,
         share_access=None,
-        register_session_retries=5,
+        register_session_retries=4,
+        register_session_retry_wait=1,
+        register_session_retry_factor=10,
         auto_mkdir=False,
         **kwargs,
     ):
@@ -103,6 +107,19 @@ class SMBFileSystem(AbstractFileSystem):
         - 'r': Allow other handles to be opened with read access.
         - 'w': Allow other handles to be opened with write access.
         - 'd': Allow other handles to be opened with delete access.
+        register_session_retries: int
+            Number of retries to register a session with the server. Retries are not performed
+            for authentication errors, as they are considered as invalid credentials and not network
+            issues. If set to negative value, no register attempts will be performed.
+        register_session_retry_wait: int
+            Time in seconds to wait between each retry. Number must be non-negative.
+        register_session_retry_factor: int
+            Base factor for the wait time between each retry. The wait time
+            is calculated using exponential function. For factor=1 all wait times
+            will be equal to `register_session_retry_wait`. For any number of retries,
+            the last wait time will be equal to `register_session_retry_wait` and for retries>1
+            the first wait time will be equal to `register_session_retry_wait / factor`.
+            Number must be equal to or greater than 1. Optimal factor is 10.
         auto_mkdir: bool
             Whether, when opening a file, the directory containing it should
             be created (if it doesn't already exist). This is assumed by pyarrow
@@ -118,6 +135,17 @@ class SMBFileSystem(AbstractFileSystem):
         self.temppath = kwargs.pop("temppath", "")
         self.share_access = share_access
         self.register_session_retries = register_session_retries
+        if register_session_retry_wait < 0:
+            raise ValueError(
+                "register_session_retry_wait must be a non-negative integer"
+            )
+        self.register_session_retry_wait = register_session_retry_wait
+        if register_session_retry_factor < 1:
+            raise ValueError(
+                "register_session_retry_factor must be a positive "
+                "integer equal to or greater than 1"
+            )
+        self.register_session_retry_factor = register_session_retry_factor
         self.auto_mkdir = auto_mkdir
         self._connect()
 
@@ -128,7 +156,26 @@ class SMBFileSystem(AbstractFileSystem):
     def _connect(self):
         import time
 
-        for _ in range(self.register_session_retries):
+        if self.register_session_retries <= -1:
+            return
+
+        retried_errors = []
+
+        wait_time = self.register_session_retry_wait
+        n_waits = (
+            self.register_session_retries - 1
+        )  # -1 = No wait time after the last retry
+        factor = self.register_session_retry_factor
+
+        # Generate wait times for each retry attempt.
+        # Wait times are calculated using exponential function. For factor=1 all wait times
+        # will be equal to `wait`. For any number of retries the last wait time will be
+        # equal to `wait` and for retries>2 the first wait time will be equal to `wait / factor`.
+        wait_times = iter(
+            factor ** (n / n_waits - 1) * wait_time for n in range(0, n_waits + 1)
+        )
+
+        for attempt in range(self.register_session_retries + 1):
             try:
                 smbclient.register_session(
                     self.host,
@@ -138,9 +185,35 @@ class SMBFileSystem(AbstractFileSystem):
                     encrypt=self.encrypt,
                     connection_timeout=self.timeout,
                 )
-                break
-            except Exception:
-                time.sleep(0.1)
+                return
+            except (
+                smbprotocol.exceptions.SMBAuthenticationError,
+                smbprotocol.exceptions.LogonFailure,
+            ):
+                # These exceptions should not be repeated, as they clearly indicate
+                # that the credentials are invalid and not a network issue.
+                raise
+            except ValueError as exc:
+                if re.findall(r"\[Errno -\d+]", str(exc)):
+                    # This exception is raised by the smbprotocol.transport:Tcp.connect
+                    # and originates from socket.gaierror (OSError). These exceptions might
+                    # be raised due to network instability. We will retry to connect.
+                    retried_errors.append(exc)
+                else:
+                    # All another ValueError exceptions should be raised, as they are not
+                    # related to network issues.
+                    raise
+            except Exception as exc:
+                # Save the exception and retry to connect. This except might be dropped
+                # in the future, once all exceptions suited for retry are identified.
+                retried_errors.append(exc)
 
+            if attempt < self.register_session_retries:
+                time.sleep(next(wait_times))
+
+        # Raise last exception to inform user about the connection issues.
+        # Note: Should we use ExceptionGroup to raise all exceptions?
+        raise retried_errors[-1]
 
     @classmethod
     def _strip_protocol(cls, path):
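
The retry schedule can be computed standalone to see what the exponential formula produces. With the new defaults (retries=4, wait=1, factor=10) there are five attempts, separated by waits of roughly 0.1, 0.22, 0.46 and 1.0 seconds:

retries, wait_time, factor = 4, 1, 10
n_waits = retries - 1  # no wait after the final attempt
wait_times = [factor ** (n / n_waits - 1) * wait_time for n in range(n_waits + 1)]
print([round(w, 2) for w in wait_times])  # [0.1, 0.22, 0.46, 1.0]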
fsspec/implementations/webhdfs.py
CHANGED

@@ -102,7 +102,7 @@ class WebHDFS(AbstractFileSystem):
         if self._cached:
             return
         super().__init__(**kwargs)
-        self.url = f"{'https' if use_https else 'http'}://{host}:{port}/webhdfs/v1"
+        self.url = f"{'https' if use_https else 'http'}://{host}:{port}/webhdfs/v1"
         self.kerb = kerberos
         self.kerb_kwargs = kerb_kwargs or {}
         self.pars = {}
@@ -393,7 +393,7 @@ class WebHDFS(AbstractFileSystem):
             with self.open(tmp_fname, "wb") as rstream:
                 shutil.copyfileobj(lstream, rstream)
             self.mv(tmp_fname, rpath)
-        except BaseException:
+        except BaseException:
             with suppress(FileNotFoundError):
                 self.rm(tmp_fname)
             raise
fsspec/implementations/zip.py
CHANGED

@@ -1,3 +1,4 @@
+import os
 import zipfile
 
 import fsspec
@@ -48,7 +49,7 @@ class ZipFileSystem(AbstractArchiveFileSystem):
         if mode not in set("rwa"):
             raise ValueError(f"mode '{mode}' no understood")
         self.mode = mode
-        if isinstance(fo, str):
+        if isinstance(fo, (str, os.PathLike)):
             if mode == "a":
                 m = "r+b"
             else:
@@ -132,3 +133,45 @@ class ZipFileSystem(AbstractArchiveFileSystem):
         out.size = info["size"]
         out.name = info["name"]
         return out
+
+    def find(self, path, maxdepth=None, withdirs=False, detail=False, **kwargs):
+        if maxdepth is not None and maxdepth < 1:
+            raise ValueError("maxdepth must be at least 1")
+
+        # Remove the leading slash, as the zip file paths are always
+        # given without a leading slash
+        path = path.lstrip("/")
+        path_parts = list(filter(lambda s: bool(s), path.split("/")))
+
+        def _matching_starts(file_path):
+            file_parts = filter(lambda s: bool(s), file_path.split("/"))
+            return all(a == b for a, b in zip(path_parts, file_parts))
+
+        self._get_dirs()
+
+        result = {}
+        # To match posix find, if an exact file name is given, we should
+        # return only that file
+        if path in self.dir_cache and self.dir_cache[path]["type"] == "file":
+            result[path] = self.dir_cache[path]
+            return result if detail else [path]
+
+        for file_path, file_info in self.dir_cache.items():
+            if not (path == "" or _matching_starts(file_path)):
+                continue
+
+            if file_info["type"] == "directory":
+                if withdirs:
+                    if file_path not in result:
+                        result[file_path.strip("/")] = file_info
+                continue
+
+            if file_path not in result:
+                result[file_path] = file_info if detail else None
+
+        if maxdepth:
+            path_depth = path.count("/")
+            result = {
+                k: v for k, v in result.items() if k.count("/") - path_depth < maxdepth
+            }
+        return result if detail else sorted(result)
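
The new `find` can be exercised against an archive built in memory; the outputs below reflect my reading of the code above (prefix matching per path segment, and the posix-find-like exact-file shortcut), so treat them as expected rather than guaranteed:

import io
import zipfile

from fsspec.implementations.zip import ZipFileSystem

buf = io.BytesIO()
with zipfile.ZipFile(buf, "w") as z:
    z.writestr("top.txt", "a")
    z.writestr("dir/inner.txt", "b")
    z.writestr("dir/sub/deep.txt", "c")

fs = ZipFileSystem(buf)
print(fs.find(""))               # ['dir/inner.txt', 'dir/sub/deep.txt', 'top.txt']
print(fs.find("dir"))            # ['dir/inner.txt', 'dir/sub/deep.txt']
print(fs.find("dir/inner.txt"))  # exact-file shortcut: ['dir/inner.txt']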
fsspec/mapping.py
CHANGED

@@ -153,10 +153,10 @@ class FSMap(MutableMapping):
         k = self._key_to_str(key)
         try:
             result = self.fs.cat(k)
-        except self.missing_exceptions:
+        except self.missing_exceptions as exc:
             if default is not None:
                 return default
-            raise KeyError(key)
+            raise KeyError(key) from exc
         return result
 
     def pop(self, key, default=None):
@@ -184,8 +184,8 @@ class FSMap(MutableMapping):
         """Remove key"""
         try:
             self.fs.rm(self._key_to_str(key))
-        except Exception:
-            raise KeyError
+        except Exception as exc:
+            raise KeyError from exc
 
     def __contains__(self, key):
         """Does key exist in mapping?"""
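
The mapping change in action: with this release, a missing key surfaces as a KeyError whose __cause__ is the underlying filesystem error rather than a bare KeyError:

import fsspec

m = fsspec.get_mapper("memory://mapdemo")
m["x"] = b"1"
try:
    m["missing"]
except KeyError as e:
    print(type(e.__cause__).__name__)  # FileNotFoundError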
fsspec/spec.py
CHANGED

@@ -428,11 +428,9 @@ class AbstractFileSystem(metaclass=_Cached):
         except (FileNotFoundError, OSError) as e:
             if on_error == "raise":
                 raise
-            elif callable(on_error):
+            if callable(on_error):
                 on_error(e)
-            if detail:
-                return path, {}, {}
-            return path, [], []
+            return
 
         for info in listing:
             # each info name must be at least [path]/part , but here
@@ -650,7 +648,7 @@ class AbstractFileSystem(metaclass=_Cached):
         Returns a single dictionary, with exactly the same information as ``ls``
         would with ``detail=True``.
 
-        The default implementation
+        The default implementation calls ls and could be overridden by a
         shortcut. kwargs are passed on to ```ls()``.
 
         Some file systems might not be able to measure the file's size, in
@@ -1892,7 +1890,7 @@ class AbstractBufferedFile(io.IOBase):
         self.offset = 0
         try:
             self._initiate_upload()
-        except:
+        except:
             self.closed = True
             raise
fsspec/tests/abstract/__init__.py
CHANGED

@@ -4,9 +4,9 @@ from hashlib import md5
 import pytest
 
 from fsspec.implementations.local import LocalFileSystem
-from fsspec.tests.abstract.copy import AbstractCopyTests  # noqa
-from fsspec.tests.abstract.get import AbstractGetTests  # noqa
-from fsspec.tests.abstract.put import AbstractPutTests  # noqa
+from fsspec.tests.abstract.copy import AbstractCopyTests  # noqa: F401
+from fsspec.tests.abstract.get import AbstractGetTests  # noqa: F401
+from fsspec.tests.abstract.put import AbstractPutTests  # noqa: F401
 
 
 class BaseAbstractFixtures:
fsspec/utils.py
CHANGED

@@ -427,10 +427,7 @@ def is_exception(obj: Any) -> bool:
 
 
 def isfilelike(f: Any) -> TypeGuard[IO[bytes]]:
-    for attr in ["read", "close", "tell"]:
-        if not hasattr(f, attr):
-            return False
-    return True
+    return all(hasattr(f, attr) for attr in ["read", "close", "tell"])
 
 
 def get_protocol(url: str) -> str:
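
The rewritten `isfilelike` is behavior-identical, just reduced to a single all() over duck-type checks; shown here against an open buffer and a plain object:

import io

def isfilelike(f):
    return all(hasattr(f, attr) for attr in ["read", "close", "tell"])

print(isfilelike(io.BytesIO()))  # True
print(isfilelike(b"bytes"))      # False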
{fsspec-2024.6.1.dist-info → fsspec-2024.10.0.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: fsspec
-Version: 2024.6.1
+Version: 2024.10.0
 Summary: File-system specification
 Project-URL: Changelog, https://filesystem-spec.readthedocs.io/en/latest/changelog.html
 Project-URL: Documentation, https://filesystem-spec.readthedocs.io/en/latest/
{fsspec-2024.6.1.dist-info → fsspec-2024.10.0.dist-info}/RECORD
CHANGED

@@ -1,55 +1,55 @@
 fsspec/__init__.py,sha256=l9MJaNNV2d4wKpCtMvXDr55n92DkdrAayGy3F9ICjzk,1998
-fsspec/_version.py,sha256=
+fsspec/_version.py,sha256=TRFHcujqz6-GeA85-A44TpoeoMj1E4TmW_pLohsSakA,419
 fsspec/archive.py,sha256=S__DzfZj-urAN3tp2W6jJ6YDiXG1fAl7FjvWUN73qIE,2386
-fsspec/asyn.py,sha256=
+fsspec/asyn.py,sha256=KUi-txo8VDtMUbrgEOu7i6tnJzMPTR2XHU0u70t7nCY,36512
 fsspec/caching.py,sha256=x6IEdxtR3cMDjy40sNHyawR2SLtNSahVuP5i_TImdso,31600
 fsspec/callbacks.py,sha256=BDIwLzK6rr_0V5ch557fSzsivCElpdqhXr5dZ9Te-EE,9210
 fsspec/compression.py,sha256=jCSUMJu-zSNyrusnHT0wKXgOd1tTJR6vM126i5SR5Zc,4865
 fsspec/config.py,sha256=LF4Zmu1vhJW7Je9Q-cwkRc3xP7Rhyy7Xnwj26Z6sv2g,4279
 fsspec/conftest.py,sha256=fVfx-NLrH_OZS1TIpYNoPzM7efEcMoL62reHOdYeFCA,1245
-fsspec/core.py,sha256=
+fsspec/core.py,sha256=pXNmJ0qgMO-BTOKPoPWkBdeScXdrKJZ2gjOtdK8x0b0,23775
 fsspec/dircache.py,sha256=YzogWJrhEastHU7vWz-cJiJ7sdtLXFXhEpInGKd4EcM,2717
 fsspec/exceptions.py,sha256=pauSLDMxzTJMOjvX1WEUK0cMyFkrFxpWJsyFywav7A8,331
-fsspec/fuse.py,sha256=
+fsspec/fuse.py,sha256=Q-3NOOyLqBfYa4Db5E19z_ZY36zzYHtIs1mOUasItBQ,10177
 fsspec/generic.py,sha256=AFbo-mHBt5QJV1Aplg5CJuUiiJ4bNQhcKRuwkZJdWac,13761
-fsspec/gui.py,sha256=
+fsspec/gui.py,sha256=xBnHL2-r0LVwhDAtnHoPpXts7jd4Z32peawCJiI-7lI,13975
 fsspec/json.py,sha256=65sQ0Y7mTj33u_Y4IId5up4abQ3bAel4E4QzbKMiQSg,3826
-fsspec/mapping.py,sha256=
+fsspec/mapping.py,sha256=CtD_GEmyYgXefQHndkxu7Zb_kbTS3mlFP2zIwlAoQTY,8289
 fsspec/parquet.py,sha256=ONG29Enesp0ToCH2bQ7zkpimnVIsZ2S4xCLj35-fY78,19455
 fsspec/registry.py,sha256=HVC-4HWDZnA6rycJwAu8F8ZXzON_85MTQVIyS6LOHxo,11320
-fsspec/spec.py,sha256=
+fsspec/spec.py,sha256=7RkKPudVJnBJ1zfM6SCPwovsQyPxvt3hXkI7rpZMePk,69491
 fsspec/transaction.py,sha256=xliRG6U2Zf3khG4xcw9WiB-yAoqJSHEGK_VjHOdtgo0,2398
-fsspec/utils.py,sha256=
+fsspec/utils.py,sha256=dVaokocjhMOnO3B1KmKlgxYqojQJyzb3mgIfaAaz8Pk,22941
 fsspec/implementations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-fsspec/implementations/arrow.py,sha256=
+fsspec/implementations/arrow.py,sha256=721Dikne_lV_0tlgk9jyKmHL6W-5MT0h2LKGvOYQTPI,8623
 fsspec/implementations/cache_mapper.py,sha256=W4wlxyPxZbSp9ItJ0pYRVBMh6bw9eFypgP6kUYuuiI4,2421
 fsspec/implementations/cache_metadata.py,sha256=pcOJYcBQY5OaC7Yhw0F3wjg08QLYApGmoISCrbs59ks,8511
 fsspec/implementations/cached.py,sha256=t5atYATgjuABm-mUyReqjGqVyyP1XBSuROX92aMecxY,32826
 fsspec/implementations/dask.py,sha256=CXZbJzIVOhKV8ILcxuy3bTvcacCueAbyQxmvAkbPkrk,4466
 fsspec/implementations/data.py,sha256=LDLczxRh8h7x39Zjrd-GgzdQHr78yYxDlrv2C9Uxb5E,1658
-fsspec/implementations/dbfs.py,sha256=
-fsspec/implementations/dirfs.py,sha256=
-fsspec/implementations/ftp.py,sha256=
-fsspec/implementations/git.py,sha256=
+fsspec/implementations/dbfs.py,sha256=a0eNjLxyfFK7pbEa52U8K-PhNHukzdGVx1eLcVniaXY,15092
+fsspec/implementations/dirfs.py,sha256=ymakitNNQ07tW76EShyw3rC9RvIDHl4gtuOhE_h1vUg,12032
+fsspec/implementations/ftp.py,sha256=VpJWnQscdEKRu4fzkCtuf3jD9A74mBaerS2ijUwZ-_I,11936
+fsspec/implementations/git.py,sha256=4SElW9U5d3k3_ITlvUAx59Yk7XLNRTqkGa2C3hCUkWM,3754
 fsspec/implementations/github.py,sha256=eAn1kJ7VeWR6gVoVRLBYclF_rQDXSJU-xzMXpvPQWqs,8002
-fsspec/implementations/http.py,sha256=
+fsspec/implementations/http.py,sha256=RLklsE1WG0eQ271haPRA_4Fz4q3wfnz5n4LifH447Eg,29826
 fsspec/implementations/jupyter.py,sha256=B2uj7OEm7yIk-vRSsO37_ND0t0EBvn4B-Su43ibN4Pg,3811
 fsspec/implementations/libarchive.py,sha256=5_I2DiLXwQ1JC8x-K7jXu-tBwhO9dj7tFLnb0bTnVMQ,7102
-fsspec/implementations/local.py,sha256=
-fsspec/implementations/memory.py,sha256=
-fsspec/implementations/reference.py,sha256=
+fsspec/implementations/local.py,sha256=DNBZhF9LYYTPR4PKedeWuk32Tztc9jlgXtGRFGX7nv4,15103
+fsspec/implementations/memory.py,sha256=Z-eADtiIQ5_rhhPzX3t-NQVmWCsRtnxKuN-dTWKzrnM,10277
+fsspec/implementations/reference.py,sha256=FxQ20HcV8SMB6DpHW33hpVOOsy8S_2xcVtDJRCtZ9rQ,45145
 fsspec/implementations/sftp.py,sha256=fMY9XZcmpjszQ2tCqO_TPaJesaeD_Dv7ptYzgUPGoO0,5631
-fsspec/implementations/smb.py,sha256=
+fsspec/implementations/smb.py,sha256=5fhu8h06nOLBPh2c48aT7WBRqh9cEcbIwtyu06wTjec,15236
 fsspec/implementations/tar.py,sha256=dam78Tp_CozybNqCY2JYgGBS3Uc9FuJUAT9oB0lolOs,4111
-fsspec/implementations/webhdfs.py,sha256=
-fsspec/implementations/zip.py,sha256=
-fsspec/tests/abstract/__init__.py,sha256=
+fsspec/implementations/webhdfs.py,sha256=aet-AOfMoK91C3jNu5xBxK0Mu2iaAWiL9Xfu12KyjQI,16705
+fsspec/implementations/zip.py,sha256=9LBMHPft2OutJl2Ft-r9u_z3GptLkc2n91ur2A3bCbg,6072
+fsspec/tests/abstract/__init__.py,sha256=o3rQBCeTTTdji0OxKdTvBvwL7q78sEIh5J5-Q-If6z0,10046
 fsspec/tests/abstract/common.py,sha256=1GQwNo5AONzAnzZj0fWgn8NJPLXALehbsuGxS3FzWVU,4973
 fsspec/tests/abstract/copy.py,sha256=gU5-d97U3RSde35Vp4RxPY4rWwL744HiSrJ8IBOp9-8,19967
 fsspec/tests/abstract/get.py,sha256=vNR4HztvTR7Cj56AMo7_tx7TeYz1Jgr_2Wb8Lv-UiBY,20755
 fsspec/tests/abstract/mv.py,sha256=k8eUEBIrRrGMsBY5OOaDXdGnQUKGwDIfQyduB6YD3Ns,1982
 fsspec/tests/abstract/put.py,sha256=7aih17OKB_IZZh1Mkq1eBDIjobhtMQmI8x-Pw-S_aZk,21201
-fsspec-2024.6.1.dist-info/METADATA,sha256=
-fsspec-2024.6.1.dist-info/WHEEL,sha256=
-fsspec-2024.6.1.dist-info/licenses/LICENSE,sha256=
-fsspec-2024.6.1.dist-info/RECORD,,
+fsspec-2024.10.0.dist-info/METADATA,sha256=l8HZ_K6qpTaPGBm3jOSHa1DX6Gja2hM3t5yBrdkmv7E,11750
+fsspec-2024.10.0.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87
+fsspec-2024.10.0.dist-info/licenses/LICENSE,sha256=LcNUls5TpzB5FcAIqESq1T53K0mzTN0ARFBnaRQH7JQ,1513
+fsspec-2024.10.0.dist-info/RECORD,,
{fsspec-2024.6.1.dist-info → fsspec-2024.10.0.dist-info}/WHEEL
File without changes

{fsspec-2024.6.1.dist-info → fsspec-2024.10.0.dist-info}/licenses/LICENSE
File without changes