provide-foundation 0.0.0.dev1__py3-none-any.whl → 0.0.0.dev3__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only.
Files changed (163)
  1. provide/foundation/__init__.py +36 -10
  2. provide/foundation/archive/__init__.py +1 -1
  3. provide/foundation/archive/base.py +15 -14
  4. provide/foundation/archive/bzip2.py +40 -40
  5. provide/foundation/archive/gzip.py +42 -42
  6. provide/foundation/archive/operations.py +93 -96
  7. provide/foundation/archive/tar.py +33 -31
  8. provide/foundation/archive/zip.py +52 -50
  9. provide/foundation/asynctools/__init__.py +20 -0
  10. provide/foundation/asynctools/core.py +126 -0
  11. provide/foundation/cli/__init__.py +2 -2
  12. provide/foundation/cli/commands/deps.py +15 -9
  13. provide/foundation/cli/commands/logs/__init__.py +3 -3
  14. provide/foundation/cli/commands/logs/generate.py +2 -2
  15. provide/foundation/cli/commands/logs/query.py +4 -4
  16. provide/foundation/cli/commands/logs/send.py +3 -3
  17. provide/foundation/cli/commands/logs/tail.py +3 -3
  18. provide/foundation/cli/decorators.py +11 -11
  19. provide/foundation/cli/main.py +1 -1
  20. provide/foundation/cli/testing.py +2 -40
  21. provide/foundation/cli/utils.py +21 -18
  22. provide/foundation/config/__init__.py +35 -2
  23. provide/foundation/config/base.py +2 -2
  24. provide/foundation/config/converters.py +477 -0
  25. provide/foundation/config/defaults.py +67 -0
  26. provide/foundation/config/env.py +6 -20
  27. provide/foundation/config/loader.py +10 -4
  28. provide/foundation/config/sync.py +8 -6
  29. provide/foundation/config/types.py +5 -5
  30. provide/foundation/config/validators.py +4 -4
  31. provide/foundation/console/input.py +5 -5
  32. provide/foundation/console/output.py +36 -14
  33. provide/foundation/context/__init__.py +8 -4
  34. provide/foundation/context/core.py +88 -110
  35. provide/foundation/crypto/certificates/__init__.py +9 -5
  36. provide/foundation/crypto/certificates/base.py +2 -2
  37. provide/foundation/crypto/certificates/certificate.py +48 -19
  38. provide/foundation/crypto/certificates/factory.py +26 -18
  39. provide/foundation/crypto/certificates/generator.py +24 -23
  40. provide/foundation/crypto/certificates/loader.py +24 -16
  41. provide/foundation/crypto/certificates/operations.py +17 -10
  42. provide/foundation/crypto/certificates/trust.py +21 -21
  43. provide/foundation/env/__init__.py +28 -0
  44. provide/foundation/env/core.py +218 -0
  45. provide/foundation/errors/__init__.py +3 -3
  46. provide/foundation/errors/decorators.py +0 -234
  47. provide/foundation/errors/types.py +0 -98
  48. provide/foundation/eventsets/display.py +13 -14
  49. provide/foundation/eventsets/registry.py +61 -31
  50. provide/foundation/eventsets/resolver.py +50 -46
  51. provide/foundation/eventsets/sets/das.py +8 -8
  52. provide/foundation/eventsets/sets/database.py +14 -14
  53. provide/foundation/eventsets/sets/http.py +21 -21
  54. provide/foundation/eventsets/sets/llm.py +16 -16
  55. provide/foundation/eventsets/sets/task_queue.py +13 -13
  56. provide/foundation/eventsets/types.py +7 -7
  57. provide/foundation/file/directory.py +14 -23
  58. provide/foundation/file/lock.py +4 -3
  59. provide/foundation/hub/components.py +75 -389
  60. provide/foundation/hub/config.py +157 -0
  61. provide/foundation/hub/discovery.py +63 -0
  62. provide/foundation/hub/handlers.py +89 -0
  63. provide/foundation/hub/lifecycle.py +195 -0
  64. provide/foundation/hub/manager.py +7 -4
  65. provide/foundation/hub/processors.py +49 -0
  66. provide/foundation/integrations/__init__.py +11 -0
  67. provide/foundation/{observability → integrations}/openobserve/__init__.py +10 -7
  68. provide/foundation/{observability → integrations}/openobserve/auth.py +1 -1
  69. provide/foundation/{observability → integrations}/openobserve/client.py +14 -14
  70. provide/foundation/{observability → integrations}/openobserve/commands.py +12 -12
  71. provide/foundation/integrations/openobserve/config.py +37 -0
  72. provide/foundation/{observability → integrations}/openobserve/formatters.py +1 -1
  73. provide/foundation/{observability → integrations}/openobserve/otlp.py +2 -2
  74. provide/foundation/{observability → integrations}/openobserve/search.py +2 -3
  75. provide/foundation/{observability → integrations}/openobserve/streaming.py +5 -5
  76. provide/foundation/logger/__init__.py +0 -1
  77. provide/foundation/logger/config/base.py +1 -1
  78. provide/foundation/logger/config/logging.py +69 -299
  79. provide/foundation/logger/config/telemetry.py +39 -121
  80. provide/foundation/logger/factories.py +2 -2
  81. provide/foundation/logger/processors/main.py +12 -10
  82. provide/foundation/logger/ratelimit/limiters.py +4 -4
  83. provide/foundation/logger/ratelimit/processor.py +1 -1
  84. provide/foundation/logger/setup/coordinator.py +39 -25
  85. provide/foundation/logger/setup/processors.py +3 -3
  86. provide/foundation/logger/setup/testing.py +14 -0
  87. provide/foundation/logger/trace.py +5 -5
  88. provide/foundation/metrics/__init__.py +1 -1
  89. provide/foundation/metrics/otel.py +3 -1
  90. provide/foundation/observability/__init__.py +3 -3
  91. provide/foundation/process/__init__.py +9 -0
  92. provide/foundation/process/exit.py +48 -0
  93. provide/foundation/process/lifecycle.py +69 -46
  94. provide/foundation/resilience/__init__.py +36 -0
  95. provide/foundation/resilience/circuit.py +166 -0
  96. provide/foundation/resilience/decorators.py +236 -0
  97. provide/foundation/resilience/fallback.py +208 -0
  98. provide/foundation/resilience/retry.py +327 -0
  99. provide/foundation/serialization/__init__.py +16 -0
  100. provide/foundation/serialization/core.py +70 -0
  101. provide/foundation/streams/config.py +78 -0
  102. provide/foundation/streams/console.py +4 -5
  103. provide/foundation/streams/core.py +5 -2
  104. provide/foundation/streams/file.py +12 -2
  105. provide/foundation/testing/__init__.py +29 -9
  106. provide/foundation/testing/archive/__init__.py +7 -7
  107. provide/foundation/testing/archive/fixtures.py +58 -54
  108. provide/foundation/testing/cli.py +30 -20
  109. provide/foundation/testing/common/__init__.py +13 -15
  110. provide/foundation/testing/common/fixtures.py +27 -57
  111. provide/foundation/testing/file/__init__.py +15 -15
  112. provide/foundation/testing/file/content_fixtures.py +289 -0
  113. provide/foundation/testing/file/directory_fixtures.py +107 -0
  114. provide/foundation/testing/file/fixtures.py +42 -516
  115. provide/foundation/testing/file/special_fixtures.py +145 -0
  116. provide/foundation/testing/logger.py +89 -8
  117. provide/foundation/testing/mocking/__init__.py +21 -21
  118. provide/foundation/testing/mocking/fixtures.py +80 -67
  119. provide/foundation/testing/process/__init__.py +23 -23
  120. provide/foundation/testing/process/async_fixtures.py +414 -0
  121. provide/foundation/testing/process/fixtures.py +48 -571
  122. provide/foundation/testing/process/subprocess_fixtures.py +210 -0
  123. provide/foundation/testing/threading/__init__.py +17 -17
  124. provide/foundation/testing/threading/basic_fixtures.py +105 -0
  125. provide/foundation/testing/threading/data_fixtures.py +101 -0
  126. provide/foundation/testing/threading/execution_fixtures.py +278 -0
  127. provide/foundation/testing/threading/fixtures.py +32 -502
  128. provide/foundation/testing/threading/sync_fixtures.py +100 -0
  129. provide/foundation/testing/time/__init__.py +11 -11
  130. provide/foundation/testing/time/fixtures.py +95 -83
  131. provide/foundation/testing/transport/__init__.py +9 -9
  132. provide/foundation/testing/transport/fixtures.py +54 -54
  133. provide/foundation/time/__init__.py +18 -0
  134. provide/foundation/time/core.py +63 -0
  135. provide/foundation/tools/__init__.py +2 -2
  136. provide/foundation/tools/base.py +68 -67
  137. provide/foundation/tools/cache.py +69 -74
  138. provide/foundation/tools/downloader.py +68 -62
  139. provide/foundation/tools/installer.py +51 -57
  140. provide/foundation/tools/registry.py +38 -45
  141. provide/foundation/tools/resolver.py +70 -68
  142. provide/foundation/tools/verifier.py +39 -50
  143. provide/foundation/tracer/spans.py +2 -14
  144. provide/foundation/transport/__init__.py +26 -33
  145. provide/foundation/transport/base.py +32 -30
  146. provide/foundation/transport/client.py +44 -49
  147. provide/foundation/transport/config.py +36 -107
  148. provide/foundation/transport/errors.py +13 -27
  149. provide/foundation/transport/http.py +69 -55
  150. provide/foundation/transport/middleware.py +113 -114
  151. provide/foundation/transport/registry.py +29 -27
  152. provide/foundation/transport/types.py +6 -6
  153. provide/foundation/utils/deps.py +17 -14
  154. provide/foundation/utils/parsing.py +49 -4
  155. {provide_foundation-0.0.0.dev1.dist-info → provide_foundation-0.0.0.dev3.dist-info}/METADATA +2 -2
  156. provide_foundation-0.0.0.dev3.dist-info/RECORD +233 -0
  157. provide_foundation-0.0.0.dev1.dist-info/RECORD +0 -200
  158. /provide/foundation/{observability → integrations}/openobserve/exceptions.py +0 -0
  159. /provide/foundation/{observability → integrations}/openobserve/models.py +0 -0
  160. {provide_foundation-0.0.0.dev1.dist-info → provide_foundation-0.0.0.dev3.dist-info}/WHEEL +0 -0
  161. {provide_foundation-0.0.0.dev1.dist-info → provide_foundation-0.0.0.dev3.dist-info}/entry_points.txt +0 -0
  162. {provide_foundation-0.0.0.dev1.dist-info → provide_foundation-0.0.0.dev3.dist-info}/licenses/LICENSE +0 -0
  163. {provide_foundation-0.0.0.dev1.dist-info → provide_foundation-0.0.0.dev3.dist-info}/top_level.txt +0 -0
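
Note on the relocated OpenObserve modules: items 67-75 and 158-159 above move the OpenObserve client from provide/foundation/observability/ to provide/foundation/integrations/ (item 71 adds a new config.py under the new location). A minimal import sketch of that move; this diff does not show which names the new package re-exports, so only the package path itself is referenced here:

# 0.0.0.dev1 (old location):
#   from provide.foundation.observability import openobserve

# 0.0.0.dev3 (new location):
from provide.foundation.integrations import openobserve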
provide/foundation/tools/cache.py

@@ -5,11 +5,13 @@ Provides TTL-based caching to avoid re-downloading tools
 that are already installed and valid.
 """
 
-import json
 from datetime import datetime, timedelta
+import json
 from pathlib import Path
 
 from provide.foundation.errors import FoundationError
+from provide.foundation.file.atomic import atomic_write
+from provide.foundation.file.safe import safe_read_text
 from provide.foundation.logger import get_logger
 
 log = get_logger(__name__)
@@ -17,99 +19,93 @@ log = get_logger(__name__)
 
 class CacheError(FoundationError):
     """Raised when cache operations fail."""
-
+
     pass
 
 
 class ToolCache:
     """
     Cache for installed tools with TTL support.
-
+
     Tracks installed tool locations and expiration times to
     avoid unnecessary re-downloads and installations.
     """
-
-    def __init__(self, cache_dir: Path | None = None):
+
+    def __init__(self, cache_dir: Path | None = None) -> None:
         """
         Initialize the cache.
-
+
         Args:
             cache_dir: Cache directory (defaults to ~/.wrknv/cache).
         """
         self.cache_dir = cache_dir or (Path.home() / ".wrknv" / "cache")
         self.cache_dir.mkdir(parents=True, exist_ok=True)
-
+
         self.metadata_file = self.cache_dir / "metadata.json"
         self.metadata = self._load_metadata()
-
+
     def _load_metadata(self) -> dict[str, dict]:
         """
         Load cache metadata from disk.
-
+
         Returns:
             Cache metadata dictionary.
         """
-        if self.metadata_file.exists():
+        content = safe_read_text(self.metadata_file, default="{}")
+        if content:
             try:
-                with self.metadata_file.open() as f:
-                    return json.load(f)
+                return json.loads(content)
             except Exception as e:
-                log.warning(f"Failed to load cache metadata: {e}")
-
+                log.warning(f"Failed to parse cache metadata: {e}")
+
         return {}
-
+
     def _save_metadata(self) -> None:
         """Save cache metadata to disk."""
         try:
-            with self.metadata_file.open("w") as f:
-                json.dump(self.metadata, f, indent=2)
+            content = json.dumps(self.metadata, indent=2)
+            atomic_write(self.metadata_file, content.encode("utf-8"))
         except Exception as e:
             log.error(f"Failed to save cache metadata: {e}")
-
+
     def get(self, tool: str, version: str) -> Path | None:
         """
         Get cached tool path if valid.
-
+
         Args:
             tool: Tool name.
            version: Tool version.
-
+
        Returns:
            Path to cached tool if valid, None otherwise.
        """
        key = f"{tool}:{version}"
-
+
        if entry := self.metadata.get(key):
            path = Path(entry["path"])
-
+
            # Check if path exists
            if not path.exists():
                log.debug(f"Cache miss: {key} path doesn't exist")
                self.invalidate(tool, version)
                return None
-
+
            # Check if expired
            if self._is_expired(entry):
                log.debug(f"Cache miss: {key} expired")
                self.invalidate(tool, version)
                return None
-
+
            log.debug(f"Cache hit: {key}")
            return path
-
+
        log.debug(f"Cache miss: {key} not in cache")
        return None
-
-    def store(
-        self,
-        tool: str,
-        version: str,
-        path: Path,
-        ttl_days: int = 7
-    ) -> None:
+
+    def store(self, tool: str, version: str, path: Path, ttl_days: int = 7) -> None:
        """
        Store tool in cache.
-
+
        Args:
            tool: Tool name.
            version: Tool version.
@@ -117,7 +113,7 @@ class ToolCache:
            ttl_days: Time-to-live in days.
        """
        key = f"{tool}:{version}"
-
+
        self.metadata[key] = {
            "path": str(path),
            "tool": tool,
@@ -125,14 +121,14 @@
            "cached_at": datetime.now().isoformat(),
            "ttl_days": ttl_days,
        }
-
+
        self._save_metadata()
        log.debug(f"Cached {key} at {path} (TTL: {ttl_days} days)")
-
+
    def invalidate(self, tool: str, version: str | None = None) -> None:
        """
        Invalidate cache entries.
-
+
        Args:
            tool: Tool name.
            version: Specific version, or None for all versions.
@@ -146,60 +142,59 @@ class ToolCache:
        else:
            # Invalidate all versions of tool
            keys_to_remove = [
-                k for k in self.metadata
-                if self.metadata[k].get("tool") == tool
+                k for k in self.metadata if self.metadata[k].get("tool") == tool
            ]
            for key in keys_to_remove:
                del self.metadata[key]
                log.debug(f"Invalidated cache for {key}")
-
+
        self._save_metadata()
-
+
    def _is_expired(self, entry: dict) -> bool:
        """
        Check if cache entry is expired.
-
+
        Args:
            entry: Cache entry dictionary.
-
+
        Returns:
            True if expired, False otherwise.
        """
        try:
            cached_at = datetime.fromisoformat(entry["cached_at"])
            ttl_days = entry.get("ttl_days", 7)
-
+
            if ttl_days <= 0:
                # Never expires
                return False
-
+
            expiry = cached_at + timedelta(days=ttl_days)
            return datetime.now() > expiry
        except Exception as e:
            log.debug(f"Error checking expiry: {e}")
            return True  # Treat as expired if we can't determine
-
+
    def clear(self) -> None:
        """Clear all cache entries."""
        self.metadata = {}
        self._save_metadata()
        log.info("Cleared tool cache")
-
+
    def list_cached(self) -> list[dict]:
        """
        List all cached tools.
-
+
        Returns:
            List of cache entries with metadata.
        """
        results = []
-
+
        for key, entry in self.metadata.items():
            # Add expiry status
            entry = entry.copy()
            entry["key"] = key
            entry["expired"] = self._is_expired(entry)
-
+
            # Calculate days until expiry
            try:
                cached_at = datetime.fromisoformat(entry["cached_at"])
@@ -212,55 +207,55 @@ class ToolCache:
                    entry["days_until_expiry"] = -1  # Never expires
            except Exception:
                entry["days_until_expiry"] = 0
-
+
            results.append(entry)
-
+
        return results
-
+
    def get_size(self) -> int:
        """
        Get total size of cached tools in bytes.
-
+
        Returns:
            Total size in bytes.
        """
        total = 0
-
+
        for entry in self.metadata.values():
            path = Path(entry["path"])
-            if path.exists():
-                try:
+            try:
+                if path.exists():
                    # Calculate directory size
                    if path.is_dir():
-                        total += sum(
-                            f.stat().st_size
-                            for f in path.rglob("*")
-                            if f.is_file()
-                        )
+                        for f in path.rglob("*"):
+                            if f.is_file():
+                                try:
+                                    total += f.stat().st_size
+                                except Exception as e:
+                                    log.debug(f"Failed to get size of file {f}: {e}")
                    else:
                        total += path.stat().st_size
-                except Exception as e:
-                    log.debug(f"Failed to get size of {path}: {e}")
-
+            except Exception as e:
+                log.debug(f"Failed to get size of {path}: {e}")
+
        return total
-
+
    def prune_expired(self) -> int:
        """
        Remove expired entries from cache.
-
+
        Returns:
            Number of entries removed.
        """
        expired_keys = [
-            key for key, entry in self.metadata.items()
-            if self._is_expired(entry)
+            key for key, entry in self.metadata.items() if self._is_expired(entry)
        ]
-
+
        for key in expired_keys:
            del self.metadata[key]
-
+
        if expired_keys:
            self._save_metadata()
            log.info(f"Pruned {len(expired_keys)} expired cache entries")
-
-        return len(expired_keys)
+
+        return len(expired_keys)
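
Net effect of the cache.py hunks above: metadata is now read through safe_read_text() with a "{}" default instead of an exists()/open() check, and written with atomic_write() instead of a bare open("w"), so an interrupted write can no longer leave a truncated metadata.json; per-file stat() failures in get_size() are also tolerated individually. A minimal usage sketch of the ToolCache API shown in these hunks; the tool name, version, and directories are illustrative only:

from pathlib import Path

from provide.foundation.tools.cache import ToolCache

# Hypothetical tool, version, and paths, chosen only to exercise the API above.
tool_dir = Path("/tmp/tools/terraform-1.7.0")
tool_dir.mkdir(parents=True, exist_ok=True)

cache = ToolCache(cache_dir=Path("/tmp/wrknv-cache"))
cache.store("terraform", "1.7.0", tool_dir, ttl_days=7)

hit = cache.get("terraform", "1.7.0")   # Path while cached and unexpired, else None
pruned = cache.prune_expired()          # number of expired entries removed
print(hit, pruned)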
provide/foundation/tools/downloader.py

@@ -5,13 +5,14 @@ Provides capabilities for downloading tools with progress tracking,
 parallel downloads, and mirror support.
 """
 
-import hashlib
 from concurrent.futures import ThreadPoolExecutor
+import hashlib
 from pathlib import Path
 from typing import Callable
 
 from provide.foundation.errors import FoundationError
 from provide.foundation.logger import get_logger
+from provide.foundation.resilience import retry
 from provide.foundation.transport import UniversalClient
 
 log = get_logger(__name__)
@@ -19,48 +20,48 @@ log = get_logger(__name__)
 
 class DownloadError(FoundationError):
     """Raised when download fails."""
-
+
     pass
 
 
 class ToolDownloader:
     """
     Advanced download capabilities for tools.
-
+
     Features:
     - Progress reporting with callbacks
     - Parallel downloads for multiple files
     - Mirror fallback support
     - Checksum verification
-
+
     Attributes:
         client: Transport client for HTTP requests.
        progress_callbacks: List of progress callback functions.
    """
-
+
    def __init__(self, client: UniversalClient):
        """
        Initialize the downloader.
-
+
        Args:
            client: Universal client for making HTTP requests.
        """
        self.client = client
        self.progress_callbacks: list[Callable[[int, int], None]] = []
-
+
    def add_progress_callback(self, callback: Callable[[int, int], None]) -> None:
        """
        Add a progress callback.
-
+
        Args:
            callback: Function that receives (downloaded_bytes, total_bytes).
        """
        self.progress_callbacks.append(callback)
-
+
    def _report_progress(self, downloaded: int, total: int) -> None:
        """
        Report progress to all callbacks.
-
+
        Args:
            downloaded: Bytes downloaded so far.
            total: Total bytes to download (0 if unknown).
@@ -70,100 +71,95 @@ class ToolDownloader:
                callback(downloaded, total)
            except Exception as e:
                log.warning(f"Progress callback failed: {e}")
-
+
+    @retry(max_attempts=3, base_delay=1.0)
    def download_with_progress(
-        self,
-        url: str,
-        dest: Path,
-        checksum: str | None = None
+        self, url: str, dest: Path, checksum: str | None = None
    ) -> Path:
        """
        Download a file with progress reporting.
-
+
        Args:
            url: URL to download from.
            dest: Destination file path.
            checksum: Optional checksum for verification.
-
+
        Returns:
            Path to the downloaded file.
-
+
        Raises:
            DownloadError: If download or verification fails.
        """
        log.debug(f"Downloading {url} to {dest}")
-
+
        # Ensure parent directory exists
        dest.parent.mkdir(parents=True, exist_ok=True)
-
+
        # Stream download with progress
        with self.client.stream("GET", url) as response:
            # Get total size if available
            total_size = int(response.headers.get("content-length", 0))
            downloaded = 0
-
+
            # Write to file and report progress
            with dest.open("wb") as f:
                for chunk in response.iter_bytes(8192):
                    f.write(chunk)
                    downloaded += len(chunk)
                    self._report_progress(downloaded, total_size)
-
+
        # Verify checksum if provided
        if checksum:
            if not self.verify_checksum(dest, checksum):
                dest.unlink()
                raise DownloadError(f"Checksum mismatch for {url}")
-
+
        log.info(f"Downloaded {url} successfully")
        return dest
-
+
    def verify_checksum(self, file_path: Path, expected: str) -> bool:
        """
        Verify file checksum.
-
+
        Args:
            file_path: Path to file to verify.
            expected: Expected checksum (hex string).
-
+
        Returns:
            True if checksum matches, False otherwise.
        """
        # Default to SHA256
        hasher = hashlib.sha256()
-
+
        with file_path.open("rb") as f:
            for chunk in iter(lambda: f.read(8192), b""):
                hasher.update(chunk)
-
+
        actual = hasher.hexdigest()
        return actual == expected
-
-    def download_parallel(
-        self,
-        urls: list[tuple[str, Path]]
-    ) -> list[Path]:
+
+    def download_parallel(self, urls: list[tuple[str, Path]]) -> list[Path]:
        """
        Download multiple files in parallel.
-
+
        Args:
            urls: List of (url, destination) tuples.
-
+
        Returns:
            List of downloaded file paths in the same order as input.
-
+
        Raises:
            DownloadError: If any download fails.
        """
        errors = []
-
+
        with ThreadPoolExecutor(max_workers=4) as executor:
            # Submit all downloads, maintaining order with index
            futures = [
                executor.submit(self.download_with_progress, url, dest)
                for url, dest in urls
            ]
-
+
            # Collect results in order
            results = []
            for i, future in enumerate(futures):
@@ -174,40 +170,50 @@ class ToolDownloader:
                except Exception as e:
                    errors.append((url, e))
                    log.error(f"Failed to download {url}: {e}")
-
+
        if errors:
            raise DownloadError(f"Some downloads failed: {errors}")
-
+
        return results
-
-    def download_with_mirrors(
-        self,
-        mirrors: list[str],
-        dest: Path
-    ) -> Path:
+
+    def download_with_mirrors(self, mirrors: list[str], dest: Path) -> Path:
        """
-        Try multiple mirrors until one succeeds.
-
+        Try multiple mirrors until one succeeds using fallback pattern.
+
        Args:
            mirrors: List of mirror URLs to try.
            dest: Destination file path.
-
+
        Returns:
            Path to downloaded file.
-
+
        Raises:
            DownloadError: If all mirrors fail.
        """
-        errors = []
-
+        from provide.foundation.resilience.fallback import FallbackChain
+
+        if not mirrors:
+            raise DownloadError("No mirrors provided")
+
+        # Create fallback functions for each mirror
+        fallback_funcs = []
        for mirror_url in mirrors:
-            try:
-                log.debug(f"Trying mirror: {mirror_url}")
-                return self.download_with_progress(mirror_url, dest)
-            except Exception as e:
-                log.warning(f"Mirror {mirror_url} failed: {e}")
-                errors.append((mirror_url, str(e)))
-                continue
-
-        # All mirrors failed
-        raise DownloadError(f"All mirrors failed: {errors}")
+
+            def create_mirror_func(url):
+                def mirror_download():
+                    log.debug(f"Trying mirror: {url}")
+                    return self.download_with_progress(url, dest)
+
+                return mirror_download
+
+            fallback_funcs.append(create_mirror_func(mirror_url))
+
+        # Use FallbackChain to try mirrors in order
+        chain = FallbackChain(
+            fallbacks=fallback_funcs[1:]
+        )  # All but first are fallbacks
+
+        try:
+            return chain.execute(fallback_funcs[0])  # First is primary
+        except Exception as e:
+            raise DownloadError(f"All mirrors failed: {e}")