s3fs 2025.12.0__tar.gz → 2026.2.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36) hide show
  1. {s3fs-2025.12.0/s3fs.egg-info → s3fs-2026.2.0}/PKG-INFO +3 -3
  2. {s3fs-2025.12.0 → s3fs-2026.2.0}/docs/source/changelog.rst +13 -0
  3. {s3fs-2025.12.0 → s3fs-2026.2.0}/docs/source/index.rst +61 -0
  4. s3fs-2026.2.0/requirements.txt +3 -0
  5. {s3fs-2025.12.0 → s3fs-2026.2.0}/s3fs/__init__.py +1 -1
  6. {s3fs-2025.12.0 → s3fs-2026.2.0}/s3fs/_version.py +3 -3
  7. {s3fs-2025.12.0 → s3fs-2026.2.0}/s3fs/core.py +85 -11
  8. s3fs-2026.2.0/s3fs/tests/test_custom_error_handler.py +255 -0
  9. {s3fs-2025.12.0 → s3fs-2026.2.0}/s3fs/tests/test_s3fs.py +28 -0
  10. {s3fs-2025.12.0 → s3fs-2026.2.0/s3fs.egg-info}/PKG-INFO +3 -3
  11. {s3fs-2025.12.0 → s3fs-2026.2.0}/s3fs.egg-info/SOURCES.txt +1 -0
  12. s3fs-2026.2.0/s3fs.egg-info/requires.txt +3 -0
  13. s3fs-2025.12.0/requirements.txt +0 -3
  14. s3fs-2025.12.0/s3fs.egg-info/requires.txt +0 -3
  15. {s3fs-2025.12.0 → s3fs-2026.2.0}/LICENSE.txt +0 -0
  16. {s3fs-2025.12.0 → s3fs-2026.2.0}/MANIFEST.in +0 -0
  17. {s3fs-2025.12.0 → s3fs-2026.2.0}/README.md +0 -0
  18. {s3fs-2025.12.0 → s3fs-2026.2.0}/docs/source/api.rst +0 -0
  19. {s3fs-2025.12.0 → s3fs-2026.2.0}/docs/source/code-of-conduct.rst +0 -0
  20. {s3fs-2025.12.0 → s3fs-2026.2.0}/docs/source/development.rst +0 -0
  21. {s3fs-2025.12.0 → s3fs-2026.2.0}/docs/source/install.rst +0 -0
  22. {s3fs-2025.12.0 → s3fs-2026.2.0}/s3fs/errors.py +0 -0
  23. {s3fs-2025.12.0 → s3fs-2026.2.0}/s3fs/mapping.py +0 -0
  24. {s3fs-2025.12.0 → s3fs-2026.2.0}/s3fs/tests/__init__.py +0 -0
  25. {s3fs-2025.12.0 → s3fs-2026.2.0}/s3fs/tests/derived/__init__.py +0 -0
  26. {s3fs-2025.12.0 → s3fs-2026.2.0}/s3fs/tests/derived/s3fs_fixtures.py +0 -0
  27. {s3fs-2025.12.0 → s3fs-2026.2.0}/s3fs/tests/derived/s3fs_test.py +0 -0
  28. {s3fs-2025.12.0 → s3fs-2026.2.0}/s3fs/tests/test_mapping.py +0 -0
  29. {s3fs-2025.12.0 → s3fs-2026.2.0}/s3fs/tests/test_utils.py +0 -0
  30. {s3fs-2025.12.0 → s3fs-2026.2.0}/s3fs/utils.py +0 -0
  31. {s3fs-2025.12.0 → s3fs-2026.2.0}/s3fs.egg-info/dependency_links.txt +0 -0
  32. {s3fs-2025.12.0 → s3fs-2026.2.0}/s3fs.egg-info/not-zip-safe +0 -0
  33. {s3fs-2025.12.0 → s3fs-2026.2.0}/s3fs.egg-info/top_level.txt +0 -0
  34. {s3fs-2025.12.0 → s3fs-2026.2.0}/setup.cfg +0 -0
  35. {s3fs-2025.12.0 → s3fs-2026.2.0}/setup.py +0 -0
  36. {s3fs-2025.12.0 → s3fs-2026.2.0}/versioneer.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: s3fs
3
- Version: 2025.12.0
3
+ Version: 2026.2.0
4
4
  Summary: Convenient Filesystem interface over S3
5
5
  Home-page: http://github.com/fsspec/s3fs/
6
6
  Maintainer: Martin Durant
@@ -19,8 +19,8 @@ Classifier: Programming Language :: Python :: 3.14
19
19
  Requires-Python: >= 3.10
20
20
  Description-Content-Type: text/markdown
21
21
  License-File: LICENSE.txt
22
- Requires-Dist: aiobotocore<3.0.0,>=2.5.4
23
- Requires-Dist: fsspec==2025.12.0
22
+ Requires-Dist: aiobotocore<4.0.0,>=2.19.0
23
+ Requires-Dist: fsspec==2026.2.0
24
24
  Requires-Dist: aiohttp!=4.0.0a0,!=4.0.0a1
25
25
  Dynamic: classifier
26
26
  Dynamic: description
@@ -1,6 +1,19 @@
1
1
  Changelog
2
2
  =========
3
3
 
4
+ 2026.2.0
5
+ --------
6
+
7
+ - add custom error handling (#1003)
8
+ - do delete placeholders with rm(recursive=True) (#1005)
9
+ - force new session if it was explicitly closed (#1002)
10
+
11
+
12
+ 2026.1.0
13
+ --------
14
+
15
+ - allow aiobotocore 3 (#998)
16
+
4
17
  2025.12.0
5
18
  ---------
6
19
 
@@ -154,6 +154,67 @@ Python's standard `logging framework`_.
154
154
 
155
155
  .. _logging framework: https://docs.python.org/3/library/logging.html
156
156
 
157
+ Errors
158
+ ------
159
+
160
+ The ``s3fs`` library includes a built-in mechanism to automatically retry
161
+ operations when specific transient errors occur. You can customize this behavior
162
+ by adding specific exception types or defining complex logic via custom handlers.
163
+
164
+ Default Retryable Errors
165
+ ~~~~~~~~~~~~~~~~~~~~~~~~
166
+
167
+ By default, ``s3fs`` will retry the following exception types:
168
+
169
+ - ``socket.timeout``
170
+ - ``HTTPClientError``
171
+ - ``IncompleteRead``
172
+ - ``FSTimeoutError``
173
+ - ``ResponseParserError``
174
+ - ``aiohttp.ClientPayloadError`` (if available)
175
+
176
+ Registering Custom Error Types
177
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
178
+
179
+ To include additional exception types in the default retry logic, use the
180
+ ``add_retryable_error`` function. This is useful for simple type-based retries.
181
+
182
+ .. code-block:: python
183
+
184
+ >>> class MyCustomError(Exception):
185
+ pass
186
+ >>> s3fs.add_retryable_error(MyCustomError)
187
+
188
+ Implementing Custom Error Handlers
189
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
190
+
191
+ For more complex scenarios, such as retrying based on an error message rather than
192
+ just the type, you can register a custom error handler using ``set_custom_error_handler``.
193
+
194
+ The handler should be a callable that accepts an exception instance and returns ``True``
195
+ if the error should be retried, or ``False`` otherwise.
196
+
197
+ .. code-block:: python
198
+
199
+ >>> def my_handler(e):
200
+ return isinstance(e, MyCustomError) and "some condition" in str(e)
201
+ >>> s3fs.set_custom_error_handler(my_handler)
202
+
203
+ Handling AWS ClientErrors
204
+ ~~~~~~~~~~~~~~~~~~~~~~~~~
205
+
206
+ ``s3fs`` provides specialized handling for ``botocore.exceptions.ClientError``.
207
+ While ``s3fs`` checks these against internal patterns (like throttling),
208
+ you can extend this behavior using a custom handler. Note that the internal
209
+ patterns will still be checked and handled before the custom handler.
210
+
211
+ .. code-block:: python
212
+
213
+ >>> def another_handler(e):
214
+ return isinstance(e, ClientError) and "Throttling" in str(e)
215
+ >>> s3fs.set_custom_error_handler(another_handler)
216
+
217
+
157
218
  Credentials
158
219
  -----------
159
220
 
@@ -0,0 +1,3 @@
1
+ aiobotocore>=2.19.0,<4.0.0
2
+ fsspec==2026.2.0
3
+ aiohttp!=4.0.0a0, !=4.0.0a1
@@ -1,4 +1,4 @@
1
- from .core import S3FileSystem, S3File
1
+ from .core import S3FileSystem, S3File, add_retryable_error, set_custom_error_handler
2
2
  from .mapping import S3Map
3
3
 
4
4
  from ._version import get_versions
@@ -8,11 +8,11 @@ import json
8
8
 
9
9
  version_json = '''
10
10
  {
11
- "date": "2025-12-03T10:32:02-0500",
11
+ "date": "2026-02-05T16:57:01-0500",
12
12
  "dirty": false,
13
13
  "error": null,
14
- "full-revisionid": "65f394575b9667f33b59473dc28a8f1cf6708745",
15
- "version": "2025.12.0"
14
+ "full-revisionid": "1181d335955418f081a1d0b94c3d8350cea0751f",
15
+ "version": "2026.2.0"
16
16
  }
17
17
  ''' # END VERSION_JSON
18
18
 
@@ -73,6 +73,56 @@ MAX_UPLOAD_PARTS = 10_000 # maximum number of parts for S3 multipart upload
73
73
  if ClientPayloadError is not None:
74
74
  S3_RETRYABLE_ERRORS += (ClientPayloadError,)
75
75
 
76
+
77
+ def add_retryable_error(exc):
78
+ """
79
+ Add an exception type to the list of retryable S3 errors.
80
+
81
+ Parameters
82
+ ----------
83
+ exc : Exception
84
+ The exception type to add to the retryable errors.
85
+
86
+ Examples
87
+ ----------
88
+ >>> class MyCustomError(Exception): # doctest: +SKIP
89
+ ... pass # doctest: +SKIP
90
+ >>> add_retryable_error(MyCustomError) # doctest: +SKIP
91
+ """
92
+ global S3_RETRYABLE_ERRORS
93
+ S3_RETRYABLE_ERRORS += (exc,)
94
+
95
+
96
+ CUSTOM_ERROR_HANDLER = lambda _: False
97
+
98
+
99
+ def set_custom_error_handler(func):
100
+ """Set a custom error handler function for S3 retryable errors.
101
+
102
+ The function should take an exception instance as its only argument,
103
+ and return True if the operation should be retried, or False otherwise.
104
+ This can also be used for custom behavior on `ClientError` exceptions,
105
+ such as retrying other patterns.
106
+
107
+ Parameters
108
+ ----------
109
+ func : callable[[Exception], bool]
110
+ The custom error handler function.
111
+
112
+ Examples
113
+ ----------
114
+ >>> def my_handler(e): # doctest: +SKIP
115
+ ... return isinstance(e, MyCustomError) and "some condition" in str(e) # doctest: +SKIP
116
+ >>> set_custom_error_handler(my_handler) # doctest: +SKIP
117
+
118
+ >>> def another_handler(e): # doctest: +SKIP
119
+ ... return isinstance(e, ClientError) and "Throttling" in str(e) # doctest: +SKIP
120
+ >>> set_custom_error_handler(another_handler) # doctest: +SKIP
121
+ """
122
+ global CUSTOM_ERROR_HANDLER
123
+ CUSTOM_ERROR_HANDLER = func
124
+
125
+
76
126
  _VALID_FILE_MODES = {"r", "w", "a", "rb", "wb", "ab"}
77
127
 
78
128
  _PRESERVE_KWARGS = [
@@ -110,29 +160,46 @@ buck_acls = {"private", "public-read", "public-read-write", "authenticated-read"
110
160
  async def _error_wrapper(func, *, args=(), kwargs=None, retries):
111
161
  if kwargs is None:
112
162
  kwargs = {}
163
+ err = None
113
164
  for i in range(retries):
165
+ wait_time = min(1.7**i * 0.1, 15)
166
+
114
167
  try:
115
168
  return await func(*args, **kwargs)
116
169
  except S3_RETRYABLE_ERRORS as e:
117
170
  err = e
118
171
  logger.debug("Retryable error: %s", e)
119
- await asyncio.sleep(min(1.7**i * 0.1, 15))
172
+ await asyncio.sleep(wait_time)
120
173
  except ClientError as e:
121
174
  logger.debug("Client error (maybe retryable): %s", e)
122
175
  err = e
123
- wait_time = min(1.7**i * 0.1, 15)
124
- if "SlowDown" in str(e):
125
- await asyncio.sleep(wait_time)
126
- elif "reduce your request rate" in str(e):
127
- await asyncio.sleep(wait_time)
128
- elif "XAmzContentSHA256Mismatch" in str(e):
176
+
177
+ matched = False
178
+ for pattern in [
179
+ "SlowDown",
180
+ "reduce your request rate",
181
+ "XAmzContentSHA256Mismatch",
182
+ ]:
183
+ if pattern in str(e):
184
+ matched = True
185
+ break
186
+
187
+ if matched:
129
188
  await asyncio.sleep(wait_time)
130
189
  else:
131
- break
190
+ should_retry = CUSTOM_ERROR_HANDLER(e)
191
+ if should_retry:
192
+ await asyncio.sleep(wait_time)
193
+ else:
194
+ break
132
195
  except Exception as e:
133
- logger.debug("Nonretryable error: %s", e)
134
196
  err = e
135
- break
197
+ should_retry = CUSTOM_ERROR_HANDLER(e)
198
+ if should_retry:
199
+ await asyncio.sleep(wait_time)
200
+ else:
201
+ logger.debug("Nonretryable error: %s", e)
202
+ break
136
203
 
137
204
  if "'coroutine'" in str(err):
138
205
  # aiobotocore internal error - fetch original botocore error
@@ -470,6 +537,7 @@ class S3FileSystem(AsyncFileSystem):
470
537
  >>> split_path("s3://mybucket/path/to/versioned_file?versionId=some_version_id")
471
538
  ['mybucket', 'path/to/versioned_file', 'some_version_id']
472
539
  """
540
+ trail = path[len(path.rstrip("/")) :]
473
541
  path = self._strip_protocol(path)
474
542
  path = path.lstrip("/")
475
543
  if "/" not in path:
@@ -477,6 +545,7 @@ class S3FileSystem(AsyncFileSystem):
477
545
  else:
478
546
  bucket, keypart = self._find_bucket_key(path)
479
547
  key, _, version_id = keypart.partition("?versionId=")
548
+ key += trail # restore trailing slashes removed by AbstractFileSystem._strip_protocol
480
549
  return (
481
550
  bucket,
482
551
  key,
@@ -519,7 +588,12 @@ class S3FileSystem(AsyncFileSystem):
519
588
  >>> s3.connect(refresh=True) # doctest: +SKIP
520
589
  """
521
590
  if self._s3 is not None and not refresh:
522
- return self._s3
591
+ hsess = getattr(getattr(self._s3, "_endpoint", None), "http_session", None)
592
+ if hsess is not None:
593
+ if all(_.closed for _ in hsess._sessions.values()):
594
+ refresh = True
595
+ if not refresh:
596
+ return self._s3
523
597
  logger.debug("Setting up s3fs instance")
524
598
 
525
599
  client_kwargs = self.client_kwargs.copy()
@@ -0,0 +1,255 @@
1
+ """Tests for custom error handler functionality."""
2
+
3
+ import asyncio
4
+ import pytest
5
+ from botocore.exceptions import ClientError
6
+
7
+ import s3fs.core
8
+ from s3fs.core import (
9
+ S3FileSystem,
10
+ _error_wrapper,
11
+ set_custom_error_handler,
12
+ add_retryable_error,
13
+ )
14
+
15
+
16
+ # Custom exception types for testing
17
+ class CustomRetryableError(Exception):
18
+ """A custom exception that should be retried."""
19
+
20
+ pass
21
+
22
+
23
+ class CustomNonRetryableError(Exception):
24
+ """A custom exception that should not be retried."""
25
+
26
+ pass
27
+
28
+
29
+ @pytest.fixture(autouse=True)
30
+ def reset_error_handler():
31
+ """Reset the custom error handler and retryable errors after each test."""
32
+ original_errors = s3fs.core.S3_RETRYABLE_ERRORS
33
+ yield
34
+ # Reset to default handler
35
+ s3fs.core.CUSTOM_ERROR_HANDLER = lambda e: False
36
+ # Reset retryable errors tuple
37
+ s3fs.core.S3_RETRYABLE_ERRORS = original_errors
38
+
39
+
40
+ def test_handler_retry_on_custom_exception():
41
+ """Test that custom error handler allows retrying on custom exceptions."""
42
+ call_count = 0
43
+
44
+ async def failing_func():
45
+ nonlocal call_count
46
+ call_count += 1
47
+ if call_count < 3:
48
+ raise CustomRetryableError("Custom error that should retry")
49
+ return "success"
50
+
51
+ # Set up custom handler to retry CustomRetryableError
52
+ def custom_handler(e):
53
+ return isinstance(e, CustomRetryableError)
54
+
55
+ set_custom_error_handler(custom_handler)
56
+
57
+ # Should retry and eventually succeed
58
+ async def run_test():
59
+ result = await _error_wrapper(failing_func, retries=5)
60
+ assert result == "success"
61
+ assert call_count == 3 # Failed twice, succeeded on third attempt
62
+
63
+ asyncio.run(run_test())
64
+
65
+
66
+ def test_handler_no_retry_on_other_exception():
67
+ """Test that custom error handler does not retry exceptions it doesn't handle."""
68
+ call_count = 0
69
+
70
+ async def failing_func():
71
+ nonlocal call_count
72
+ call_count += 1
73
+ raise CustomNonRetryableError("Custom error that should not retry")
74
+
75
+ # Set up custom handler that only retries CustomRetryableError
76
+ def custom_handler(e):
77
+ return isinstance(e, CustomRetryableError)
78
+
79
+ set_custom_error_handler(custom_handler)
80
+
81
+ # Should not retry and fail immediately
82
+ async def run_test():
83
+ with pytest.raises(CustomNonRetryableError):
84
+ await _error_wrapper(failing_func, retries=5)
85
+
86
+ assert call_count == 1 # Should only be called once
87
+
88
+ asyncio.run(run_test())
89
+
90
+
91
+ def test_handler_with_client_error():
92
+ """Test that custom handler can make ClientError retryable."""
93
+ call_count = 0
94
+
95
+ async def failing_func():
96
+ nonlocal call_count
97
+ call_count += 1
98
+ if call_count < 3:
99
+ # Create a ClientError that doesn't match the built-in retry patterns
100
+ error_response = {
101
+ "Error": {
102
+ "Code": "CustomThrottlingError",
103
+ "Message": "Custom throttling message",
104
+ }
105
+ }
106
+ raise ClientError(error_response, "operation_name")
107
+ return "success"
108
+
109
+ # Set up custom handler to retry on specific ClientError codes
110
+ def custom_handler(e):
111
+ if isinstance(e, ClientError):
112
+ return e.response.get("Error", {}).get("Code") == "CustomThrottlingError"
113
+ return False
114
+
115
+ set_custom_error_handler(custom_handler)
116
+
117
+ # Should retry and eventually succeed
118
+ async def run_test():
119
+ result = await _error_wrapper(failing_func, retries=5)
120
+ assert result == "success"
121
+ assert call_count == 3
122
+
123
+ asyncio.run(run_test())
124
+
125
+
126
+ def test_handler_preserves_builtin_retry_pattern():
127
+ """Test that custom handler doesn't interfere with built-in retry logic."""
128
+ call_count = 0
129
+
130
+ async def failing_func():
131
+ nonlocal call_count
132
+ call_count += 1
133
+ if call_count < 3:
134
+ # SlowDown is a built-in retryable pattern
135
+ error_response = {
136
+ "Error": {
137
+ "Code": "SlowDown",
138
+ "Message": "Please reduce your request rate",
139
+ }
140
+ }
141
+ raise ClientError(error_response, "operation_name")
142
+ return "success"
143
+
144
+ # Set up a custom handler that handles something else
145
+ def custom_handler(e):
146
+ return isinstance(e, CustomRetryableError)
147
+
148
+ set_custom_error_handler(custom_handler)
149
+
150
+ # Should still retry SlowDown errors due to built-in logic
151
+ async def run_test():
152
+ result = await _error_wrapper(failing_func, retries=5)
153
+ assert result == "success"
154
+ assert call_count == 3
155
+
156
+ asyncio.run(run_test())
157
+
158
+
159
+ def test_handler_max_retries():
160
+ """Test that custom handler respects max retries."""
161
+ call_count = 0
162
+
163
+ async def always_failing_func():
164
+ nonlocal call_count
165
+ call_count += 1
166
+ raise CustomRetryableError("Always fails")
167
+
168
+ def custom_handler(e):
169
+ return isinstance(e, CustomRetryableError)
170
+
171
+ set_custom_error_handler(custom_handler)
172
+
173
+ # Should retry up to retries limit then raise
174
+ async def run_test():
175
+ with pytest.raises(CustomRetryableError):
176
+ await _error_wrapper(always_failing_func, retries=3)
177
+
178
+ assert call_count == 3
179
+
180
+ asyncio.run(run_test())
181
+
182
+
183
+ def test_handler_sleep_behavior():
184
+ """Test that retries due to custom handler also wait between attempts."""
185
+ call_times = []
186
+
187
+ async def failing_func():
188
+ call_times.append(asyncio.get_event_loop().time())
189
+ raise CustomRetryableError("Retry me")
190
+
191
+ def custom_handler(e):
192
+ return isinstance(e, CustomRetryableError)
193
+
194
+ set_custom_error_handler(custom_handler)
195
+
196
+ async def run_test():
197
+ with pytest.raises(CustomRetryableError):
198
+ await _error_wrapper(failing_func, retries=3)
199
+
200
+ # Should have made 3 attempts
201
+ assert len(call_times) == 3
202
+
203
+ # Check that there was a delay between attempts
204
+ # The wait time formula is min(1.7**i * 0.1, 15)
205
+ # For i=0: min(0.1, 15) = 0.1
206
+ # For i=1: min(0.17, 15) = 0.17
207
+ if len(call_times) >= 2:
208
+ time_between_first_and_second = call_times[1] - call_times[0]
209
+ # Should be roughly 0.1 seconds (with some tolerance)
210
+ assert time_between_first_and_second >= 0.05
211
+
212
+ asyncio.run(run_test())
213
+
214
+
215
+ def test_default_handler():
216
+ """Test behavior when custom handler is not set explicitly."""
217
+ call_count = 0
218
+
219
+ async def failing_func():
220
+ nonlocal call_count
221
+ call_count += 1
222
+ raise ValueError("Regular exception")
223
+
224
+ # Don't set a custom handler, use default (returns False)
225
+ # Should not retry regular exceptions
226
+ async def run_test():
227
+ with pytest.raises(ValueError):
228
+ await _error_wrapper(failing_func, retries=5)
229
+
230
+ assert call_count == 1
231
+
232
+ asyncio.run(run_test())
233
+
234
+
235
+ def test_add_retryable_error():
236
+ """Test adding a custom exception to the retryable errors tuple."""
237
+ call_count = 0
238
+
239
+ async def failing_func():
240
+ nonlocal call_count
241
+ call_count += 1
242
+ if call_count < 3:
243
+ raise CustomRetryableError("Custom error")
244
+ return "success"
245
+
246
+ # Add CustomRetryableError to the retryable errors
247
+ add_retryable_error(CustomRetryableError)
248
+
249
+ # Should now be retried automatically without custom handler
250
+ async def run_test():
251
+ result = await _error_wrapper(failing_func, retries=5)
252
+ assert result == "success"
253
+ assert call_count == 3
254
+
255
+ asyncio.run(run_test())
@@ -3068,3 +3068,31 @@ def test_find_missing_ls(s3):
3068
3068
  listed_no_cache = s3_no_cache.ls(BASE, detail=False)
3069
3069
 
3070
3070
  assert set(listed_cached) == set(listed_no_cache)
3071
+
3072
+
3073
+ def test_session_close():
3074
+ async def run_program(run):
3075
+ s3 = s3fs.S3FileSystem(anon=True, asynchronous=True)
3076
+ session = await s3.set_session()
3077
+ files = await s3._ls(
3078
+ "s3://noaa-hrrr-bdp-pds/hrrr.20140730/conus/"
3079
+ ) # Random open data store
3080
+ print(f"Number of files {len(files)}")
3081
+ await session.close()
3082
+
3083
+ import aiobotocore.httpsession
3084
+
3085
+ aiobotocore.httpsession.AIOHTTPSession
3086
+ asyncio.run(run_program(True))
3087
+ asyncio.run(run_program(False))
3088
+
3089
+
3090
+ def test_rm_recursive_prfix(s3):
3091
+ prefix = "logs/" # must end with "/"
3092
+
3093
+ # Create empty "directory" in S3
3094
+ client = get_boto3_client()
3095
+ client.put_object(Bucket=test_bucket_name, Key=prefix, Body=b"")
3096
+ logs_path = f"s3://{test_bucket_name}/{prefix}"
3097
+ s3.rm(logs_path, recursive=True)
3098
+ assert not s3.isdir(logs_path)
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: s3fs
3
- Version: 2025.12.0
3
+ Version: 2026.2.0
4
4
  Summary: Convenient Filesystem interface over S3
5
5
  Home-page: http://github.com/fsspec/s3fs/
6
6
  Maintainer: Martin Durant
@@ -19,8 +19,8 @@ Classifier: Programming Language :: Python :: 3.14
19
19
  Requires-Python: >= 3.10
20
20
  Description-Content-Type: text/markdown
21
21
  License-File: LICENSE.txt
22
- Requires-Dist: aiobotocore<3.0.0,>=2.5.4
23
- Requires-Dist: fsspec==2025.12.0
22
+ Requires-Dist: aiobotocore<4.0.0,>=2.19.0
23
+ Requires-Dist: fsspec==2026.2.0
24
24
  Requires-Dist: aiohttp!=4.0.0a0,!=4.0.0a1
25
25
  Dynamic: classifier
26
26
  Dynamic: description
@@ -24,6 +24,7 @@ s3fs.egg-info/not-zip-safe
24
24
  s3fs.egg-info/requires.txt
25
25
  s3fs.egg-info/top_level.txt
26
26
  s3fs/tests/__init__.py
27
+ s3fs/tests/test_custom_error_handler.py
27
28
  s3fs/tests/test_mapping.py
28
29
  s3fs/tests/test_s3fs.py
29
30
  s3fs/tests/test_utils.py
@@ -0,0 +1,3 @@
1
+ aiobotocore<4.0.0,>=2.19.0
2
+ fsspec==2026.2.0
3
+ aiohttp!=4.0.0a0,!=4.0.0a1
@@ -1,3 +0,0 @@
1
- aiobotocore>=2.5.4,<3.0.0
2
- fsspec==2025.12.0
3
- aiohttp!=4.0.0a0, !=4.0.0a1
@@ -1,3 +0,0 @@
1
- aiobotocore<3.0.0,>=2.5.4
2
- fsspec==2025.12.0
3
- aiohttp!=4.0.0a0,!=4.0.0a1
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes