fsspec 2025.10.0__py3-none-any.whl → 2026.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
fsspec/_version.py CHANGED
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
 commit_id: COMMIT_ID
 __commit_id__: COMMIT_ID
 
-__version__ = version = '2025.10.0'
-__version_tuple__ = version_tuple = (2025, 10, 0)
+__version__ = version = '2026.1.0'
+__version_tuple__ = version_tuple = (2026, 1, 0)
 
 __commit_id__ = commit_id = None
fsspec/asyn.py CHANGED
@@ -328,6 +328,11 @@ class AsyncFileSystem(AbstractFileSystem):
         return self._loop
 
     async def _rm_file(self, path, **kwargs):
+        if (
+            inspect.iscoroutinefunction(self._rm)
+            and type(self)._rm is not AsyncFileSystem._rm
+        ):
+            return await self._rm(path, recursive=False, batch_size=1, **kwargs)
         raise NotImplementedError
 
     async def _rm(self, path, recursive=False, batch_size=None, **kwargs):
@@ -776,6 +781,7 @@ class AsyncFileSystem(AbstractFileSystem):
         min_idx = min(idx_star, idx_qmark, idx_brace)
 
         detail = kwargs.pop("detail", False)
+        withdirs = kwargs.pop("withdirs", True)
 
         if not has_magic(path):
             if await self._exists(path, **kwargs):
@@ -805,7 +811,7 @@ class AsyncFileSystem(AbstractFileSystem):
             depth = None
 
         allpaths = await self._find(
-            root, maxdepth=depth, withdirs=True, detail=True, **kwargs
+            root, maxdepth=depth, withdirs=withdirs, detail=True, **kwargs
        )
 
         pattern = glob_translate(path + ("/" if ends_with_sep else ""))
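The new `withdirs` passthrough above lets `glob` callers exclude directory entries from matches. A minimal usage sketch, assuming an async backend such as s3fs is installed (bucket name hypothetical):

```python
import fsspec

# s3fs subclasses AsyncFileSystem, so its glob goes through _glob above
fs = fsspec.filesystem("s3")

# withdirs is now forwarded to find(); False drops directory entries
files_only = fs.glob("my-bucket/data/**", withdirs=False)
```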
fsspec/caching.py CHANGED
@@ -6,20 +6,12 @@ import logging
 import math
 import os
 import threading
-import warnings
 from collections import OrderedDict
+from collections.abc import Callable
 from concurrent.futures import Future, ThreadPoolExecutor
 from itertools import groupby
 from operator import itemgetter
-from typing import (
-    TYPE_CHECKING,
-    Any,
-    Callable,
-    ClassVar,
-    Generic,
-    NamedTuple,
-    TypeVar,
-)
+from typing import TYPE_CHECKING, Any, ClassVar, Generic, NamedTuple, TypeVar
 
 if TYPE_CHECKING:
     import mmap
@@ -629,7 +621,7 @@ class KnownPartsOfAFile(BaseCache):
         fetcher: Fetcher,
         size: int,
         data: dict[tuple[int, int], bytes] | None = None,
-        strict: bool = True,
+        strict: bool = False,
         **_: Any,
     ):
         super().__init__(blocksize, fetcher, size)
@@ -653,50 +645,65 @@ class KnownPartsOfAFile(BaseCache):
         else:
             self.data = {}
 
+    @property
+    def size(self):
+        return sum(_[1] - _[0] for _ in self.data)
+
+    @size.setter
+    def size(self, value):
+        pass
+
+    @property
+    def nblocks(self):
+        return len(self.data)
+
+    @nblocks.setter
+    def nblocks(self, value):
+        pass
+
     def _fetch(self, start: int | None, stop: int | None) -> bytes:
         if start is None:
             start = 0
         if stop is None:
             stop = self.size
+        self.total_requested_bytes += stop - start
 
         out = b""
-        for (loc0, loc1), data in self.data.items():
-            # If self.strict=False, use zero-padded data
-            # for reads beyond the end of a "known" buffer
+        started = False
+        loc_old = 0
+        for loc0, loc1 in sorted(self.data):
+            if (loc0 <= start < loc1) and (loc0 <= stop <= loc1):
+                # entirely within the block
+                off = start - loc0
+                self.hit_count += 1
+                return self.data[(loc0, loc1)][off : off + stop - start]
+            if stop <= loc0:
+                break
+            if started and loc0 > loc_old:
+                # a gap where we need data
+                self.miss_count += 1
+                if self.strict:
+                    raise ValueError
+                out += b"\x00" * (loc0 - loc_old)
             if loc0 <= start < loc1:
+                # found the start
+                self.hit_count += 1
                 off = start - loc0
-                out = data[off : off + stop - start]
-                if not self.strict or loc0 <= stop <= loc1:
-                    # The request is within a known range, or
-                    # it begins within a known range, and we
-                    # are allowed to pad reads beyond the
-                    # buffer with zero
-                    out += b"\x00" * (stop - start - len(out))
-                    self.hit_count += 1
-                    return out
-                else:
-                    # The request ends outside a known range,
-                    # and we are being "strict" about reads
-                    # beyond the buffer
-                    start = loc1
-                    break
-
-        # We only get here if there is a request outside the
-        # known parts of the file. In an ideal world, this
-        # should never happen
-        if self.fetcher is None:
-            # We cannot fetch the data, so raise an error
-            raise ValueError(f"Read is outside the known file parts: {(start, stop)}. ")
-        # We can fetch the data, but should warn the user
-        # that this may be slow
-        warnings.warn(
-            f"Read is outside the known file parts: {(start, stop)}. "
-            f"IO/caching performance may be poor!"
-        )
-        logger.debug(f"KnownPartsOfAFile cache fetching {start}-{stop}")
-        self.total_requested_bytes += stop - start
+                out = self.data[(loc0, loc1)][off : off + stop - start]
+                started = True
+            elif start < loc0 and stop > loc1:
+                # the whole block
+                self.hit_count += 1
+                out += self.data[(loc0, loc1)]
+            elif loc0 <= stop <= loc1:
+                # end block
+                self.hit_count += 1
+                return out + self.data[(loc0, loc1)][: stop - loc0]
+            loc_old = loc1
 
         self.miss_count += 1
-        return out + super()._fetch(start, stop)
+        if started and not self.strict:
+            return out + b"\x00" * (stop - loc_old)
+        raise ValueError
 
 
 class UpdatableLRU(Generic[P, T]):
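The rewritten `_fetch` walks the known blocks in sorted order, zero-pads gaps when `strict=False` (now the default), and raises `ValueError` rather than falling back to the fetcher. A small sketch of that behavior, using two hypothetical known ranges:

```python
from fsspec.caching import KnownPartsOfAFile

# two known byte ranges, [0, 4) and [8, 12), with a gap between them
cache = KnownPartsOfAFile(
    blocksize=0,
    fetcher=None,
    size=0,  # ignored: size is now derived from the known parts
    data={(0, 4): b"abcd", (8, 12): b"wxyz"},
)

assert cache.size == 8              # sum of the known part lengths
assert cache._fetch(1, 3) == b"bc"  # read entirely inside one block
# a read spanning the gap is zero-padded because strict defaults to False
assert cache._fetch(2, 10) == b"cd\x00\x00\x00\x00wx"
```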
fsspec/compression.py CHANGED
@@ -1,5 +1,6 @@
 """Helper functions for a standard streaming compression API"""
 
+import sys
 from zipfile import ZipFile
 
 import fsspec.utils
@@ -155,26 +156,14 @@ except ImportError:
     pass
 
 try:
-    # zstd in the standard library for python >= 3.14
-    from compression.zstd import ZstdFile
-
-    register_compression("zstd", ZstdFile, "zst")
+    if sys.version_info >= (3, 14):
+        from compression import zstd
+    else:
+        from backports import zstd
 
+    register_compression("zstd", zstd.ZstdFile, "zst")
 except ImportError:
-    try:
-        import zstandard as zstd
-
-        def zstandard_file(infile, mode="rb"):
-            if "r" in mode:
-                cctx = zstd.ZstdDecompressor()
-                return cctx.stream_reader(infile)
-            else:
-                cctx = zstd.ZstdCompressor(level=10)
-                return cctx.stream_writer(infile)
-
-        register_compression("zstd", zstandard_file, "zst")
-    except ImportError:
-        pass
+    pass
 
 
 def available_compressions():
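zstd support now comes from the standard library's `compression.zstd` on Python 3.14+, or from the `backports-zstd` package on older interpreters; the previous `zstandard`-based shim is gone. Usage is unchanged; a sketch with a hypothetical local path:

```python
import fsspec

# needs Python 3.14+, or `pip install backports-zstd` on 3.10-3.13
with fsspec.open("out.txt.zst", "wt", compression="zstd") as f:
    f.write("hello zstd")
```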
fsspec/core.py CHANGED
@@ -18,7 +18,7 @@ from fsspec.caching import (  # noqa: F401
 )
 from fsspec.compression import compr
 from fsspec.config import conf
-from fsspec.registry import filesystem, get_filesystem_class
+from fsspec.registry import available_protocols, filesystem, get_filesystem_class
 from fsspec.utils import (
     _unstrip_protocol,
     build_name_function,
@@ -334,34 +334,51 @@ def _un_chain(path, kwargs):
 
     if "::" in path:
         x = re.compile(".*[^a-z]+.*")  # test for non protocol-like single word
+        known_protocols = set(available_protocols())
         bits = []
+
+        # split on '::', then ensure each bit has a protocol
         for p in path.split("::"):
-            if "://" in p or x.match(p):
+            if p in known_protocols:
+                bits.append(p + "://")
+            elif "://" in p or x.match(p):
                 bits.append(p)
             else:
                 bits.append(p + "://")
     else:
         bits = [path]
+
     # [[url, protocol, kwargs], ...]
     out = []
     previous_bit = None
     kwargs = kwargs.copy()
+
     for bit in reversed(bits):
         protocol = kwargs.pop("protocol", None) or split_protocol(bit)[0] or "file"
         cls = get_filesystem_class(protocol)
         extra_kwargs = cls._get_kwargs_from_urls(bit)
         kws = kwargs.pop(protocol, {})
+
         if bit is bits[0]:
             kws.update(kwargs)
+
         kw = dict(
             **{k: v for k, v in extra_kwargs.items() if k not in kws or v != kws[k]},
             **kws,
         )
         bit = cls._strip_protocol(bit)
-        if "target_protocol" not in kw and issubclass(cls, ChainedFileSystem):
+
+        if (
+            "target_protocol" not in kw
+            and issubclass(cls, ChainedFileSystem)
+            and not bit
+        ):
+            # replace bit if we are chaining and no path given
            bit = previous_bit
+
         out.append((bit, protocol, kw))
         previous_bit = bit
+
     out.reverse()
     return out
 
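`_un_chain` now consults the protocol registry, so a bare protocol name such as `simplecache` (with no `://`) is recognized as a chain step, and a chained filesystem only inherits the inner path when it was given none of its own. A sketch, assuming s3fs is installed and the bucket is hypothetical:

```python
import fsspec

# "simplecache" carries no path of its own, so it inherits the inner one
with fsspec.open("simplecache::s3://my-bucket/data.csv", "rb") as f:
    header = f.read(64)
```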
fsspec/implementations/arrow.py CHANGED
@@ -205,11 +205,11 @@ class ArrowFSWrapper(AbstractFileSystem):
         return self.fs.get_file_info(path).mtime
 
     def cat_file(self, path, start=None, end=None, **kwargs):
-        kwargs["seekable"] = start not in [None, 0]
+        kwargs.setdefault("seekable", start not in [None, 0])
         return super().cat_file(path, start=None, end=None, **kwargs)
 
     def get_file(self, rpath, lpath, **kwargs):
-        kwargs["seekable"] = False
+        kwargs.setdefault("seekable", False)
         super().get_file(rpath, lpath, **kwargs)
 
 
@@ -223,7 +223,6 @@ class ArrowFSWrapper(AbstractFileSystem):
         "readable",
         "writable",
         "close",
-        "size",
         "seekable",
     ],
 )
@@ -241,6 +240,10 @@ class ArrowFile(io.IOBase):
     def __enter__(self):
         return self
 
+    @property
+    def size(self):
+        return self.stream.size()
+
     def __exit__(self, *args):
         return self.close()
 
fsspec/implementations/asyn_wrapper.py CHANGED
@@ -5,6 +5,8 @@ import inspect
 import fsspec
 from fsspec.asyn import AsyncFileSystem, running_async
 
+from .chained import ChainedFileSystem
+
 
 def async_wrapper(func, obj=None, semaphore=None):
     """
@@ -35,7 +37,7 @@ def async_wrapper(func, obj=None, semaphore=None):
         return wrapper
 
 
-class AsyncFileSystemWrapper(AsyncFileSystem):
+class AsyncFileSystemWrapper(AsyncFileSystem, ChainedFileSystem):
     """
     A wrapper class to convert a synchronous filesystem into an asynchronous one.
 
fsspec/implementations/cache_metadata.py CHANGED
@@ -15,9 +15,7 @@ except ImportError:
 
 if TYPE_CHECKING:
     from collections.abc import Iterator
-    from typing import Any, Literal
-
-    from typing_extensions import TypeAlias
+    from typing import Any, Literal, TypeAlias
 
     from .cached import CachingFileSystem
 
fsspec/implementations/cached.py CHANGED
@@ -6,8 +6,9 @@ import os
 import tempfile
 import time
 import weakref
+from collections.abc import Callable
 from shutil import rmtree
-from typing import TYPE_CHECKING, Any, Callable, ClassVar
+from typing import TYPE_CHECKING, Any, ClassVar
 
 from fsspec import filesystem
 from fsspec.callbacks import DEFAULT_CALLBACK
fsspec/implementations/data.py CHANGED
@@ -1,6 +1,5 @@
 import base64
 import io
-from typing import Optional
 from urllib.parse import unquote
 
 from fsspec import AbstractFileSystem
@@ -50,7 +49,7 @@ class DataFileSystem(AbstractFileSystem):
         return io.BytesIO(self.cat_file(path))
 
     @staticmethod
-    def encode(data: bytes, mime: Optional[str] = None):
+    def encode(data: bytes, mime: str | None = None):
         """Format the given data into data-URL syntax
 
         This version always base64 encodes, even when the data is ascii/url-safe.
fsspec/implementations/dirfs.py CHANGED
@@ -1,8 +1,9 @@
 from .. import filesystem
 from ..asyn import AsyncFileSystem
+from .chained import ChainedFileSystem
 
 
-class DirFileSystem(AsyncFileSystem):
+class DirFileSystem(AsyncFileSystem, ChainedFileSystem):
     """Directory prefix filesystem
 
     The DirFileSystem is a filesystem-wrapper. It assumes every path it is dealing with
fsspec/implementations/ftp.py CHANGED
@@ -1,4 +1,5 @@
 import os
+import ssl
 import uuid
 from ftplib import FTP, FTP_TLS, Error, error_perm
 from typing import Any
@@ -6,6 +7,37 @@ from typing import Any
 from ..spec import AbstractBufferedFile, AbstractFileSystem
 from ..utils import infer_storage_options, isfilelike
 
+SECURITY_PROTOCOL_MAP = {
+    "tls": ssl.PROTOCOL_TLS,
+    "tlsv1": ssl.PROTOCOL_TLSv1,
+    "tlsv1_1": ssl.PROTOCOL_TLSv1_1,
+    "tlsv1_2": ssl.PROTOCOL_TLSv1_2,
+    "sslv23": ssl.PROTOCOL_SSLv23,
+}
+
+
+class ImplicitFTPTLS(FTP_TLS):
+    """
+    FTP_TLS subclass that automatically wraps sockets in SSL
+    to support implicit FTPS.
+    """
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self._sock = None
+
+    @property
+    def sock(self):
+        """Return the socket."""
+        return self._sock
+
+    @sock.setter
+    def sock(self, value):
+        """When modifying the socket, ensure that it is ssl wrapped."""
+        if value is not None and not isinstance(value, ssl.SSLSocket):
+            value = self.context.wrap_socket(value)
+        self._sock = value
+
 
 class FTPFileSystem(AbstractFileSystem):
     """A filesystem over classic FTP"""
@@ -55,8 +87,14 @@ class FTPFileSystem(AbstractFileSystem):
             Timeout of the ftp connection in seconds
         encoding: str
             Encoding to use for directories and filenames in FTP connection
-        tls: bool
-            Use FTP-TLS, by default False
+        tls: bool or str
+            Enable FTP-TLS for secure connections:
+            - False: Plain FTP (default)
+            - True: Explicit TLS (FTPS with AUTH TLS command)
+            - "tls": Auto-negotiate highest protocol
+            - "tlsv1": TLS v1.0
+            - "tlsv1_1": TLS v1.1
+            - "tlsv1_2": TLS v1.2
         """
         super().__init__(**kwargs)
         self.host = host
@@ -71,15 +109,27 @@ class FTPFileSystem(AbstractFileSystem):
         self.blocksize = 2**16
         self.tls = tls
         self._connect()
-        if self.tls:
+        if isinstance(self.tls, bool) and self.tls:
             self.ftp.prot_p()
 
     def _connect(self):
+        security = None
         if self.tls:
-            ftp_cls = FTP_TLS
+            if isinstance(self.tls, str):
+                ftp_cls = ImplicitFTPTLS
+                security = SECURITY_PROTOCOL_MAP.get(
+                    self.tls,
+                    f"Not supported {self.tls} protocol",
+                )
+                if isinstance(security, str):
+                    raise ValueError(security)
+            else:
+                ftp_cls = FTP_TLS
         else:
            ftp_cls = FTP
         self.ftp = ftp_cls(timeout=self.timeout, encoding=self.encoding)
+        if security:
+            self.ftp.ssl_version = security
         self.ftp.connect(self.host, self.port)
         self.ftp.login(*self.cred)
 
fsspec/implementations/http.py CHANGED
@@ -327,7 +327,7 @@ class HTTPFileSystem(AsyncFileSystem):
         async with meth(self.encode_url(rpath), data=gen_chunks(), **kw) as resp:
             self._raise_not_found_for_status(resp, rpath)
 
-    async def _exists(self, path, **kwargs):
+    async def _exists(self, path, strict=False, **kwargs):
         kw = self.kwargs.copy()
         kw.update(kwargs)
         try:
@@ -335,8 +335,14 @@ class HTTPFileSystem(AsyncFileSystem):
             session = await self.set_session()
             r = await session.get(self.encode_url(path), **kw)
             async with r:
+                if strict:
+                    self._raise_not_found_for_status(r, path)
                 return r.status < 400
+        except FileNotFoundError:
+            return False
         except aiohttp.ClientError:
+            if strict:
+                raise
             return False
 
     async def _isfile(self, path, **kwargs):
fsspec/implementations/http_sync.py CHANGED
@@ -463,14 +463,20 @@ class HTTPFileSystem(AbstractFileSystem):
             end -= 1  # bytes range is inclusive
         return f"bytes={start}-{end}"
 
-    def exists(self, path, **kwargs):
+    def exists(self, path, strict=False, **kwargs):
         kw = self.kwargs.copy()
         kw.update(kwargs)
         try:
             logger.debug(path)
             r = self.session.get(self.encode_url(path), **kw)
+            if strict:
+                self._raise_not_found_for_status(r, path)
             return r.status_code < 400
+        except FileNotFoundError:
+            return False
         except Exception:
+            if strict:
+                raise
             return False
 
     def isfile(self, path, **kwargs):
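Both the async and sync HTTP filesystems gain the same `strict` flag on `exists`: by default any failure still reports `False`, while `strict=True` re-raises transport and non-404 HTTP errors so callers can distinguish "not found" from "could not check". A sketch, assuming the relevant HTTP backend is installed:

```python
import fsspec

fs = fsspec.filesystem("http")

# default: 404s, auth errors, and connection failures all report False
fs.exists("https://example.com/missing")

# strict=True: a 404 still returns False, but other failures propagate
fs.exists("https://example.com/missing", strict=True)
```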
fsspec/implementations/libarchive.py CHANGED
@@ -195,7 +195,7 @@ class LibArchiveFileSystem(AbstractArchiveFileSystem):
         if mode != "rb":
             raise NotImplementedError
 
-        data = bytes()
+        data = b""
         with self._open_archive() as arc:
             for entry in arc:
                 if entry.pathname != path:
fsspec/implementations/local.py CHANGED
@@ -166,6 +166,10 @@ class LocalFileSystem(AbstractFileSystem):
         """
         path1 = self._strip_protocol(path1)
         path2 = self._strip_protocol(path2)
+
+        if self.auto_mkdir:
+            self.makedirs(self._parent(path2), exist_ok=True)
+
         shutil.move(path1, path2)
 
     def link(self, src, dst, **kwargs):
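`mv` on the local filesystem now honors `auto_mkdir` by creating the destination's parent directories first. A sketch:

```python
import fsspec

fs = fsspec.filesystem("file", auto_mkdir=True)
fs.pipe_file("/tmp/src.txt", b"payload")

# the parent directory no longer needs to exist beforehand
fs.mv("/tmp/src.txt", "/tmp/some/new/dir/dst.txt")
```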
fsspec/implementations/reference.py CHANGED
@@ -219,7 +219,7 @@ class LazyReferenceMapper(collections.abc.MutableMapping):
         fs.pipe("/".join([root, ".zmetadata"]), json.dumps(met).encode())
         return LazyReferenceMapper(root, fs, **kwargs)
 
-    @lru_cache()
+    @lru_cache
     def listdir(self):
         """List top-level directories"""
         dirs = (p.rsplit("/", 1)[0] for p in self.zmetadata if not p.startswith(".z"))
fsspec/json.py CHANGED
@@ -1,13 +1,8 @@
 import json
-from collections.abc import Mapping, Sequence
+from collections.abc import Callable, Mapping, Sequence
 from contextlib import suppress
 from pathlib import PurePath
-from typing import (
-    Any,
-    Callable,
-    ClassVar,
-    Optional,
-)
+from typing import Any, ClassVar
 
 from .registry import _import_class, get_filesystem_class
 from .spec import AbstractFileSystem
@@ -45,12 +40,12 @@ class FilesystemJSONDecoder(json.JSONDecoder):
     def __init__(
         self,
         *,
-        object_hook: Optional[Callable[[dict[str, Any]], Any]] = None,
-        parse_float: Optional[Callable[[str], Any]] = None,
-        parse_int: Optional[Callable[[str], Any]] = None,
-        parse_constant: Optional[Callable[[str], Any]] = None,
+        object_hook: Callable[[dict[str, Any]], Any] | None = None,
+        parse_float: Callable[[str], Any] | None = None,
+        parse_int: Callable[[str], Any] | None = None,
+        parse_constant: Callable[[str], Any] | None = None,
         strict: bool = True,
-        object_pairs_hook: Optional[Callable[[list[tuple[str, Any]]], Any]] = None,
+        object_pairs_hook: Callable[[list[tuple[str, Any]]], Any] | None = None,
     ) -> None:
         self.original_object_hook = object_hook
 
fsspec/parquet.py CHANGED
@@ -1,8 +1,12 @@
 import io
 import json
 import warnings
+from typing import Literal
+
+import fsspec
 
 from .core import url_to_fs
+from .spec import AbstractBufferedFile
 from .utils import merge_offset_ranges
 
 # Parquet-Specific Utilities for fsspec
@@ -14,19 +18,24 @@ from .utils import merge_offset_ranges
 # on remote file systems.
 
 
-def open_parquet_file(
-    path,
-    mode="rb",
-    fs=None,
+class AlreadyBufferedFile(AbstractBufferedFile):
+    def _fetch_range(self, start, end):
+        raise NotImplementedError
+
+
+def open_parquet_files(
+    path: list[str],
+    mode: Literal["rb"] = "rb",
+    fs: None | fsspec.AbstractFileSystem = None,
     metadata=None,
-    columns=None,
-    row_groups=None,
-    storage_options=None,
-    strict=False,
-    engine="auto",
-    max_gap=64_000,
-    max_block=256_000_000,
-    footer_sample_size=1_000_000,
+    columns: None | list[str] = None,
+    row_groups: None | list[int] = None,
+    storage_options: None | dict = None,
+    engine: str = "auto",
+    max_gap: int = 64_000,
+    max_block: int = 256_000_000,
+    footer_sample_size: int = 1_000_000,
+    filters: None | list[list[list[str]]] = None,
     **kwargs,
 ):
     """
@@ -72,12 +81,6 @@ def open_parquet_file(
     storage_options : dict, optional
         Used to generate an `AbstractFileSystem` object if `fs` was
         not specified.
-    strict : bool, optional
-        Whether the resulting `KnownPartsOfAFile` cache should
-        fetch reads that go beyond a known byte-range boundary.
-        If `False` (the default), any read that ends outside a
-        known part will be zero padded. Note that using
-        `strict=True` may be useful for debugging.
     max_gap : int, optional
         Neighboring byte ranges will only be merged when their
         inter-range gap is <= `max_gap`. Default is 64KB.
@@ -89,6 +92,10 @@ def open_parquet_file(
         for the footer metadata. If the sampled bytes do not contain
         the footer, a second read request will be required, and
         performance will suffer. Default is 1MB.
+    filters : list[list], optional
+        List of filters to apply to prevent reading row groups, of the
+        same format as accepted by the loading engines. Ignored if
+        ``row_groups`` is specified.
     **kwargs :
         Optional key-word arguments to pass to `fs.open`
     """
@@ -96,20 +103,36 @@ def open_parquet_file(
     # Make sure we have an `AbstractFileSystem` object
     # to work with
     if fs is None:
-        fs = url_to_fs(path, **(storage_options or {}))[0]
+        path0 = path
+        if isinstance(path, (list, tuple)):
+            path = path[0]
+        fs, path = url_to_fs(path, **(storage_options or {}))
+    else:
+        path0 = path
 
-    # For now, `columns == []` not supported. Just use
-    # default `open` command with `path` input
+    # For now, `columns == []` not supported, is the same
+    # as all columns
     if columns is not None and len(columns) == 0:
-        return fs.open(path, mode=mode)
+        columns = None
 
     # Set the engine
     engine = _set_engine(engine)
 
-    # Fetch the known byte ranges needed to read
-    # `columns` and/or `row_groups`
+    if isinstance(path0, (list, tuple)):
+        paths = path0
+    elif "*" in path:
+        paths = fs.glob(path)
+    elif path0.endswith("/"):  # or fs.isdir(path):
+        paths = [
+            _
+            for _ in fs.find(path, withdirs=False, detail=False)
+            if _.endswith((".parquet", ".parq"))
+        ]
+    else:
+        paths = [path]
+
     data = _get_parquet_byte_ranges(
-        [path],
+        paths,
         fs,
         metadata=metadata,
         columns=columns,
@@ -118,24 +141,37 @@ def open_parquet_file(
         max_gap=max_gap,
         max_block=max_block,
         footer_sample_size=footer_sample_size,
+        filters=filters,
     )
 
-    # Extract file name from `data`
-    fn = next(iter(data)) if data else path
-
     # Call self.open with "parts" caching
     options = kwargs.pop("cache_options", {}).copy()
-    return fs.open(
-        fn,
-        mode=mode,
-        cache_type="parts",
-        cache_options={
-            **options,
-            "data": data.get(fn, {}),
-            "strict": strict,
-        },
-        **kwargs,
-    )
+    return [
+        AlreadyBufferedFile(
+            fs=None,
+            path=fn,
+            mode=mode,
+            cache_type="parts",
+            cache_options={
+                **options,
+                "data": data.get(fn, {}),
+            },
+            size=max(_[1] for _ in data.get(fn, {})),
+            **kwargs,
+        )
+        for fn in data
+    ]
+
+
+def open_parquet_file(*args, **kwargs):
+    """Create files tailored to reading specific parts of parquet files
+
+    Please see ``open_parquet_files`` for details of the arguments. The
+    difference is that this function always returns a single
+    ``AlreadyBufferedFile``, whereas ``open_parquet_files`` always returns
+    a list of files, even if there are one or zero matching parquet files.
+    """
+    return open_parquet_files(*args, **kwargs)[0]
 
 
 def _get_parquet_byte_ranges(
@@ -148,6 +184,7 @@ def _get_parquet_byte_ranges(
     max_block=256_000_000,
     footer_sample_size=1_000_000,
     engine="auto",
+    filters=None,
 ):
     """Get a dictionary of the known byte ranges needed
     to read a specific column/row-group selection from a
@@ -172,6 +209,7 @@ def _get_parquet_byte_ranges(
             row_groups=row_groups,
             max_gap=max_gap,
             max_block=max_block,
+            filters=filters,
        )
 
     # Get file sizes asynchronously
@@ -183,17 +221,16 @@ def _get_parquet_byte_ranges(
     data_starts = []
     data_ends = []
     add_header_magic = True
-    if columns is None and row_groups is None:
+    if columns is None and row_groups is None and filters is None:
         # We are NOT selecting specific columns or row-groups.
         #
         # We can avoid sampling the footers, and just transfer
         # all file data with cat_ranges
         for i, path in enumerate(paths):
             result[path] = {}
-            for b in range(0, file_sizes[i], max_block):
-                data_paths.append(path)
-                data_starts.append(b)
-                data_ends.append(min(b + max_block, file_sizes[i]))
+            data_paths.append(path)
+            data_starts.append(0)
+            data_ends.append(file_sizes[i])
         add_header_magic = False  # "Magic" should already be included
     else:
         # We ARE selecting specific columns or row-groups.
@@ -235,29 +272,21 @@ def _get_parquet_byte_ranges(
 
     # Calculate required byte ranges for each path
     for i, path in enumerate(paths):
-        # Deal with small-file case.
-        # Just include all remaining bytes of the file
-        # in a single range.
-        if file_sizes[i] < max_block:
-            if footer_starts[i] > 0:
-                # Only need to transfer the data if the
-                # footer sample isn't already the whole file
-                data_paths.append(path)
-                data_starts.append(0)
-                data_ends.append(footer_starts[i])
-            continue
-
         # Use "engine" to collect data byte ranges
         path_data_starts, path_data_ends = engine._parquet_byte_ranges(
             columns,
             row_groups=row_groups,
            footer=footer_samples[i],
             footer_start=footer_starts[i],
+            filters=filters,
         )
 
         data_paths += [path] * len(path_data_starts)
         data_starts += path_data_starts
         data_ends += path_data_ends
+        result.setdefault(path, {})[(footer_starts[i], file_sizes[i])] = (
+            footer_samples[i]
+        )
 
     # Merge adjacent offset ranges
     data_paths, data_starts, data_ends = merge_offset_ranges(
@@ -291,6 +320,7 @@ def _get_parquet_byte_ranges_from_metadata(
     row_groups=None,
     max_gap=64_000,
     max_block=256_000_000,
+    filters=None,
 ):
     """Simplified version of `_get_parquet_byte_ranges` for
     the case that an engine-specific `metadata` object is
@@ -300,9 +330,7 @@ def _get_parquet_byte_ranges_from_metadata(
 
     # Use "engine" to collect data byte ranges
     data_paths, data_starts, data_ends = engine._parquet_byte_ranges(
-        columns,
-        row_groups=row_groups,
-        metadata=metadata,
+        columns, row_groups=row_groups, metadata=metadata, filters=filters
     )
 
     # Merge adjacent offset ranges
@@ -401,16 +429,19 @@ class FastparquetEngine:
         metadata=None,
         footer=None,
         footer_start=None,
+        filters=None,
     ):
         # Initialize offset ranges and define ParquetFile metadata
         pf = metadata
         data_paths, data_starts, data_ends = [], [], []
+        if filters and row_groups:
+            raise ValueError("filters and row_groups cannot be used together")
         if pf is None:
             pf = self.fp.ParquetFile(io.BytesIO(footer))
 
         # Convert columns to a set and add any index columns
         # specified in the pandas metadata (just in case)
-        column_set = None if columns is None else set(columns)
+        column_set = None if columns is None else {c.split(".", 1)[0] for c in columns}
         if column_set is not None and hasattr(pf, "pandas_metadata"):
             md_index = [
                 ind
@@ -422,7 +453,12 @@ class FastparquetEngine:
 
         # Check if row_groups is a list of integers
         # or a list of row-group metadata
-        if row_groups and not isinstance(row_groups[0], int):
+        if filters:
+            from fastparquet.api import filter_row_groups
+
+            row_group_indices = None
+            row_groups = filter_row_groups(pf, filters)
+        elif row_groups and not isinstance(row_groups[0], int):
            # Input row_groups contains row-group metadata
             row_group_indices = None
         else:
@@ -486,9 +522,12 @@ class PyarrowEngine:
         metadata=None,
         footer=None,
         footer_start=None,
+        filters=None,
     ):
         if metadata is not None:
             raise ValueError("metadata input not supported for PyarrowEngine")
+        if filters:
+            raise NotImplementedError
 
         data_starts, data_ends = [], []
         md = self.pq.ParquetFile(io.BytesIO(footer)).metadata
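`open_parquet_files` is the new plural entry point: it expands lists, globs, and trailing-slash directory paths into many parquet files and returns one `AlreadyBufferedFile` per match, while `open_parquet_file` keeps the single-file behavior by returning the first element. A sketch, assuming s3fs and fastparquet are installed (bucket and column names hypothetical):

```python
import pandas as pd
from fsspec.parquet import open_parquet_files

files = open_parquet_files(
    "s3://my-bucket/dataset/",  # trailing "/" lists *.parquet files via find()
    columns=["a", "b"],
    filters=[[("a", ">", 0)]],  # row-group filters; fastparquet engine only
    engine="fastparquet",
)
dfs = [pd.read_parquet(f, columns=["a", "b"], engine="fastparquet") for f in files]
```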
fsspec/registry.py CHANGED
@@ -72,6 +72,9 @@ known_implementations = {
         "class": "fsspec.implementations.arrow.HadoopFileSystem",
         "err": "pyarrow and local java libraries required for HDFS",
     },
+    "async_wrapper": {
+        "class": "fsspec.implementations.asyn_wrapper.AsyncFileSystemWrapper",
+    },
     "asynclocal": {
         "class": "morefs.asyn_local.AsyncLocalFileSystem",
         "err": "Install 'morefs[asynclocalfs]' to use AsyncLocalFileSystem",
@@ -186,7 +189,7 @@ known_implementations = {
     },
     "pyscript": {
         "class": "pyscript_fsspec_client.client.PyscriptFileSystem",
-        "err": "Install requests (cpython) or run in pyscript",
+        "err": "This only runs in a pyscript context",
     },
     "reference": {"class": "fsspec.implementations.reference.ReferenceFileSystem"},
     "root": {
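Registering `async_wrapper` means the wrapper is now reachable by protocol name as well as by import. A sketch wrapping the local filesystem:

```python
import fsspec
from fsspec.implementations.asyn_wrapper import AsyncFileSystemWrapper

# the registered name resolves to the same class
assert fsspec.get_filesystem_class("async_wrapper") is AsyncFileSystemWrapper

async_fs = AsyncFileSystemWrapper(fsspec.filesystem("file"))
```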
fsspec/utils.py CHANGED
@@ -7,23 +7,16 @@ import os
 import re
 import sys
 import tempfile
-from collections.abc import Iterable, Iterator, Sequence
+from collections.abc import Callable, Iterable, Iterator, Sequence
 from functools import partial
 from hashlib import md5
 from importlib.metadata import version
-from typing import (
-    IO,
-    TYPE_CHECKING,
-    Any,
-    Callable,
-    TypeVar,
-)
+from typing import IO, TYPE_CHECKING, Any, TypeVar
 from urllib.parse import urlsplit
 
 if TYPE_CHECKING:
     import pathlib
-
-    from typing_extensions import TypeGuard
+    from typing import TypeGuard
 
     from fsspec.spec import AbstractFileSystem
 
fsspec-2025.10.0.dist-info/METADATA → fsspec-2026.1.0.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: fsspec
-Version: 2025.10.0
+Version: 2026.1.0
 Summary: File-system specification
 Project-URL: Changelog, https://filesystem-spec.readthedocs.io/en/latest/changelog.html
 Project-URL: Documentation, https://filesystem-spec.readthedocs.io/en/latest/
@@ -12,12 +12,12 @@ Keywords: file
 Classifier: Development Status :: 4 - Beta
 Classifier: Intended Audience :: Developers
 Classifier: Operating System :: OS Independent
-Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
-Requires-Python: >=3.9
+Classifier: Programming Language :: Python :: 3.14
+Requires-Python: >=3.10
 Provides-Extra: abfs
 Requires-Dist: adlfs; extra == 'abfs'
 Provides-Extra: adl
@@ -49,7 +49,7 @@ Requires-Dist: distributed; extra == 'full'
 Requires-Dist: dropbox; extra == 'full'
 Requires-Dist: dropboxdrivefs; extra == 'full'
 Requires-Dist: fusepy; extra == 'full'
-Requires-Dist: gcsfs; extra == 'full'
+Requires-Dist: gcsfs>2024.2.0; extra == 'full'
 Requires-Dist: libarchive-c; extra == 'full'
 Requires-Dist: ocifs; extra == 'full'
 Requires-Dist: panel; extra == 'full'
@@ -57,7 +57,7 @@ Requires-Dist: paramiko; extra == 'full'
 Requires-Dist: pyarrow>=1; extra == 'full'
 Requires-Dist: pygit2; extra == 'full'
 Requires-Dist: requests; extra == 'full'
-Requires-Dist: s3fs; extra == 'full'
+Requires-Dist: s3fs>2024.2.0; extra == 'full'
 Requires-Dist: smbprotocol; extra == 'full'
 Requires-Dist: tqdm; extra == 'full'
 Provides-Extra: fuse
@@ -108,6 +108,7 @@ Requires-Dist: xarray; extra == 'test-downstream'
 Provides-Extra: test-full
 Requires-Dist: adlfs; extra == 'test-full'
 Requires-Dist: aiohttp!=4.0.0a0,!=4.0.0a1; extra == 'test-full'
+Requires-Dist: backports-zstd; (python_version < '3.14') and extra == 'test-full'
 Requires-Dist: cloudpickle; extra == 'test-full'
 Requires-Dist: dask; extra == 'test-full'
 Requires-Dist: distributed; extra == 'test-full'
@@ -143,7 +144,6 @@ Requires-Dist: smbprotocol; extra == 'test-full'
 Requires-Dist: tqdm; extra == 'test-full'
 Requires-Dist: urllib3; extra == 'test-full'
 Requires-Dist: zarr; extra == 'test-full'
-Requires-Dist: zstandard; (python_version < '3.14') and extra == 'test-full'
 Provides-Extra: tqdm
 Requires-Dist: tqdm; extra == 'tqdm'
 Description-Content-Type: text/markdown
@@ -197,7 +197,7 @@ CI runtime. For local use, pick a version suitable for you.
 
 ```bash
 # For a new environment (mamba / conda).
-mamba create -n fsspec -c conda-forge python=3.9 -y
+mamba create -n fsspec -c conda-forge python=3.10 -y
 conda activate fsspec
 
 # Standard dev install with docs and tests.
fsspec-2025.10.0.dist-info/RECORD → fsspec-2026.1.0.dist-info/RECORD CHANGED
@@ -1,47 +1,47 @@
 fsspec/__init__.py,sha256=L7qwNBU1iMNQd8Of87HYSNFT9gWlNMSESaJC8fY0AaQ,2053
-fsspec/_version.py,sha256=fXgQLiXV0scw4LTidVAhOWJj_BwnxWigALeToXadaR0,712
+fsspec/_version.py,sha256=u-dFB5QwBm8gDrxy5VP5UIh5ycEbPIDyQZ0EuPEDRHs,710
 fsspec/archive.py,sha256=vM6t_lgV6lBWbBYwpm3S4ofBQFQxUPr5KkDQrrQcQro,2411
-fsspec/asyn.py,sha256=mE55tO_MmGcxD14cUuaiS3veAqo0h6ZqANfnUuCN3sk,36365
-fsspec/caching.py,sha256=86uSgPa5E55b28XEhuC-dMcKAxJtZZnpQqnHTwaF3hI,34294
+fsspec/asyn.py,sha256=LP_OicTWXmKHe31wBoYs2MrrNf8rmlhjVeGg5AqvVy8,36630
+fsspec/caching.py,sha256=B2xeDz9-VDgr_dDeVOTNRq3vaS9zVUe0nxtOBgsrjUk,34260
 fsspec/callbacks.py,sha256=BDIwLzK6rr_0V5ch557fSzsivCElpdqhXr5dZ9Te-EE,9210
-fsspec/compression.py,sha256=gBK2MV_oTFVW2XDq8bZVbYQKYrl6JDUou6_-kyvmxuk,5086
+fsspec/compression.py,sha256=jwxtX_2hPr97s53W89ywkxH3wm-ZGiUoUyCCsFA4V1Y,4698
 fsspec/config.py,sha256=LF4Zmu1vhJW7Je9Q-cwkRc3xP7Rhyy7Xnwj26Z6sv2g,4279
 fsspec/conftest.py,sha256=uWfm_Qs5alPRxOhRpDfQ0-1jqSJ54pni4y96IxOREXM,3446
-fsspec/core.py,sha256=ETQrATK6ZSkuIoy5-40N_NWUfMGx1KVSl5XGuJsaoYI,23829
+fsspec/core.py,sha256=lc7XSnZU6_C6xljp7Z_xEGN3V7704hbeQLkxvPP0wds,24173
 fsspec/dircache.py,sha256=YzogWJrhEastHU7vWz-cJiJ7sdtLXFXhEpInGKd4EcM,2717
 fsspec/exceptions.py,sha256=pauSLDMxzTJMOjvX1WEUK0cMyFkrFxpWJsyFywav7A8,331
 fsspec/fuse.py,sha256=Q-3NOOyLqBfYa4Db5E19z_ZY36zzYHtIs1mOUasItBQ,10177
 fsspec/generic.py,sha256=9QHQYMNb-8w8-eYuIqShcTjO_LeHXFoQTyt8J5oEq5Q,13482
 fsspec/gui.py,sha256=CQ7QsrTpaDlWSLNOpwNoJc7khOcYXIZxmrAJN9bHWQU,14002
-fsspec/json.py,sha256=3BfNSQ96MB4Xao_ocjheINeqZM2ev7oljUzR5XmNXrE,3814
+fsspec/json.py,sha256=4EBZ-xOmRiyxmIqPIwxmDImosRQ7io7qBM2xjJPsEE4,3768
 fsspec/mapping.py,sha256=m2ndB_gtRBXYmNJg0Ie1-BVR75TFleHmIQBzC-yWhjU,8343
-fsspec/parquet.py,sha256=6ibAmG527L5JNFS0VO8BDNlxHdA3bVYqdByeiFgpUVM,19448
-fsspec/registry.py,sha256=epoYryFFzDWjbkQJfh6xkF3nEu8RTiOzV3-voi8Pshs,12048
+fsspec/parquet.py,sha256=vpOuoxg0y0iS2yoiXeAUcWB0n-wct2x2L6Vmn_O-hRQ,20668
+fsspec/registry.py,sha256=o7EGl8TEaLkcwN53X_103arzuzJeeOoVaNUWnPiXgf0,12148
 fsspec/spec.py,sha256=Ym-Ust6LRjHgbhrmvNqwOBZxoVnaw3g3xHXMZGHx_xg,77692
 fsspec/transaction.py,sha256=xliRG6U2Zf3khG4xcw9WiB-yAoqJSHEGK_VjHOdtgo0,2398
-fsspec/utils.py,sha256=12npx0j-z0qpG9ko3laZ0us4KhjMe-2epmyocJB2ih8,23173
+fsspec/utils.py,sha256=5DOxB_eE-wNHwrc6zp3h-oMp5mCLw4tsQZqqPTjLDmM,23136
 fsspec/implementations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-fsspec/implementations/arrow.py,sha256=CVVyjNt9B_pRa5Ac1XIlhVLBs7vA5kCUZjsLPELb4d4,8758
-fsspec/implementations/asyn_wrapper.py,sha256=fox9yjsEu7NCgzdAZJYfNALtUnFkIc_QmeKzaSllZho,3679
+fsspec/implementations/arrow.py,sha256=6BaSEBZ4nb8UuY6NsyFevGzXcdJWamt3qEHjMe2S-W8,8831
+fsspec/implementations/asyn_wrapper.py,sha256=3lfJkGs6D_AwRBdxTSYlL-RCVdaXBZ9Itys2P5o5Si0,3738
 fsspec/implementations/cache_mapper.py,sha256=W4wlxyPxZbSp9ItJ0pYRVBMh6bw9eFypgP6kUYuuiI4,2421
-fsspec/implementations/cache_metadata.py,sha256=rddh5-0SXIeyWCPpBpOFcaAyWoPyeYmFfeubEWt-nRM,8536
-fsspec/implementations/cached.py,sha256=d3IE33J5QA4QU_e43fPLF-dpbGszv6JM9mcFmHBI99o,35365
+fsspec/implementations/cache_metadata.py,sha256=ipIe4S8nlU_M9oRJkvTqr-b0tcbXVZsxH3GxaelaNOY,8502
+fsspec/implementations/cached.py,sha256=gp1eaM2X7ix2eGRDaC8rtTuO0icK6hbz3yDXh9YdB0E,35392
 fsspec/implementations/chained.py,sha256=iGivpNaHUFjB_ea0-HAPhcmm6CL8qnDf270PSj7JwuE,680
 fsspec/implementations/dask.py,sha256=CXZbJzIVOhKV8ILcxuy3bTvcacCueAbyQxmvAkbPkrk,4466
-fsspec/implementations/data.py,sha256=LDLczxRh8h7x39Zjrd-GgzdQHr78yYxDlrv2C9Uxb5E,1658
+fsspec/implementations/data.py,sha256=IhOGDkacYp5gkl9jhEu4msQfZPb0gS5Q_ml7Mbr6dgQ,1627
 fsspec/implementations/dbfs.py,sha256=1cvvC6KBWOb8pBVpc01xavVbEPXO1xsgZvPD7H73M9k,16217
-fsspec/implementations/dirfs.py,sha256=f1sGnQ9Vf0xTxrXo4jDeBy4Qfq3RTqAEemqBSeb0hwY,12108
-fsspec/implementations/ftp.py,sha256=bzL_TgH77nMMtTMewRGkbq4iObSHGu7YoMRCXBH4nrc,11639
+fsspec/implementations/dirfs.py,sha256=VNj6gPMfmmLPK4wxbtxt7mUqW7xkh2XDgMmEmSK_E1c,12166
+fsspec/implementations/ftp.py,sha256=fJhaMIKq2RvzYlLwG3bewy2jq4iRqjVt1aIpwtUIRwI,13235
 fsspec/implementations/gist.py,sha256=Y6jTDrE-wuTwvpPyAQDuuOMBGxlajafKWoB1_yX6jdY,8528
 fsspec/implementations/git.py,sha256=qBDWMz5LNllPqVjr5jf_1FuNha4P5lyQI3IlhYg-wUE,3731
 fsspec/implementations/github.py,sha256=aCsZL8UvXZgdkcB1RUs3DdLeNrjLKcFsFYeQFDWbBFo,11653
-fsspec/implementations/http.py,sha256=IxwekrxehP_l02yMiSHE7Xl1trCO5FzKTycH_iuHQG0,30468
-fsspec/implementations/http_sync.py,sha256=UydDqSdUBdhiJ1KufzV8rKGrTftFR4QmNV0safILb8g,30133
+fsspec/implementations/http.py,sha256=-AV5qeNpBWqnsmgnIO9Ily9B6--SR4sQJ7G4cBHarGE,30675
+fsspec/implementations/http_sync.py,sha256=UmBqd938ebwVjYgVtzg-ysG3ZoGhIJw0wFtQAfxV3Aw,30332
 fsspec/implementations/jupyter.py,sha256=q1PlQ66AAswGFyr8MFKWyobaV2YekMWRtqENBDQtD28,4002
-fsspec/implementations/libarchive.py,sha256=5_I2DiLXwQ1JC8x-K7jXu-tBwhO9dj7tFLnb0bTnVMQ,7102
-fsspec/implementations/local.py,sha256=DQeK7jRGv4_mJAweLKALO5WzIIkjXxZ_jRvwQ_xadSA,16936
+fsspec/implementations/libarchive.py,sha256=SpIA1F-zf7kb2-VYUVuhMrXTBOhBxUXKgEW1RaAdDoA,7098
+fsspec/implementations/local.py,sha256=ERDUdXdRI8AvRX06icXaDKwO-hcQgivc7EorqnayFFM,17028
 fsspec/implementations/memory.py,sha256=TDdLtSPWXxZKrrVGwmc3uS3oK_2mlcVTk2BiqR8IeII,10507
-fsspec/implementations/reference.py,sha256=3hr_CusIR1wBGo20MsKGoWCEnZJ626_QlHhRYobVYo0,48816
+fsspec/implementations/reference.py,sha256=xSUpB8o_QFAZiVJE2dt78QZMCUMLo5TaJ27e5DwDAfg,48814
 fsspec/implementations/sftp.py,sha256=L9pZOa6eLUWfJNtxkxeG2YI96SQwrM5Hj6ocyUZXUbg,5923
 fsspec/implementations/smb.py,sha256=5fhu8h06nOLBPh2c48aT7WBRqh9cEcbIwtyu06wTjec,15236
 fsspec/implementations/tar.py,sha256=dam78Tp_CozybNqCY2JYgGBS3Uc9FuJUAT9oB0lolOs,4111
@@ -55,7 +55,7 @@ fsspec/tests/abstract/mv.py,sha256=k8eUEBIrRrGMsBY5OOaDXdGnQUKGwDIfQyduB6YD3Ns,1
 fsspec/tests/abstract/open.py,sha256=Fi2PBPYLbRqysF8cFm0rwnB41kMdQVYjq8cGyDXp3BU,329
 fsspec/tests/abstract/pipe.py,sha256=LFzIrLCB5GLXf9rzFKJmE8AdG7LQ_h4bJo70r8FLPqM,402
 fsspec/tests/abstract/put.py,sha256=7aih17OKB_IZZh1Mkq1eBDIjobhtMQmI8x-Pw-S_aZk,21201
-fsspec-2025.10.0.dist-info/METADATA,sha256=M950PL-JM4aP_1zCRiebQ-lOaUqy9_4kra0-dqA8tCI,10398
-fsspec-2025.10.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-fsspec-2025.10.0.dist-info/licenses/LICENSE,sha256=LcNUls5TpzB5FcAIqESq1T53K0mzTN0ARFBnaRQH7JQ,1513
-fsspec-2025.10.0.dist-info/RECORD,,
+fsspec-2026.1.0.dist-info/METADATA,sha256=8mMtsh4ST3Onc1RmJWIyCY6ih2YzdtmD5iAhvaS2wnM,10423
+fsspec-2026.1.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+fsspec-2026.1.0.dist-info/licenses/LICENSE,sha256=LcNUls5TpzB5FcAIqESq1T53K0mzTN0ARFBnaRQH7JQ,1513
+fsspec-2026.1.0.dist-info/RECORD,,
fsspec-2025.10.0.dist-info/WHEEL → fsspec-2026.1.0.dist-info/WHEEL CHANGED
@@ -1,4 +1,4 @@
 Wheel-Version: 1.0
-Generator: hatchling 1.27.0
+Generator: hatchling 1.28.0
 Root-Is-Purelib: true
 Tag: py3-none-any