synapse-filecoin-sdk 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. pynapse/__init__.py +6 -0
  2. pynapse/_version.py +1 -0
  3. pynapse/contracts/__init__.py +34 -0
  4. pynapse/contracts/abi_registry.py +11 -0
  5. pynapse/contracts/addresses.json +30 -0
  6. pynapse/contracts/erc20_abi.json +92 -0
  7. pynapse/contracts/errorsAbi.json +933 -0
  8. pynapse/contracts/filecoinPayV1Abi.json +2424 -0
  9. pynapse/contracts/filecoinWarmStorageServiceAbi.json +2363 -0
  10. pynapse/contracts/filecoinWarmStorageServiceStateViewAbi.json +651 -0
  11. pynapse/contracts/generated.py +35 -0
  12. pynapse/contracts/payments_abi.json +205 -0
  13. pynapse/contracts/pdpVerifierAbi.json +1266 -0
  14. pynapse/contracts/providerIdSetAbi.json +161 -0
  15. pynapse/contracts/serviceProviderRegistryAbi.json +1479 -0
  16. pynapse/contracts/sessionKeyRegistryAbi.json +147 -0
  17. pynapse/core/__init__.py +68 -0
  18. pynapse/core/abis.py +25 -0
  19. pynapse/core/chains.py +97 -0
  20. pynapse/core/constants.py +27 -0
  21. pynapse/core/errors.py +22 -0
  22. pynapse/core/piece.py +263 -0
  23. pynapse/core/rand.py +14 -0
  24. pynapse/core/typed_data.py +320 -0
  25. pynapse/core/utils.py +30 -0
  26. pynapse/evm/__init__.py +3 -0
  27. pynapse/evm/client.py +26 -0
  28. pynapse/filbeam/__init__.py +3 -0
  29. pynapse/filbeam/service.py +39 -0
  30. pynapse/payments/__init__.py +17 -0
  31. pynapse/payments/service.py +826 -0
  32. pynapse/pdp/__init__.py +21 -0
  33. pynapse/pdp/server.py +331 -0
  34. pynapse/pdp/types.py +38 -0
  35. pynapse/pdp/verifier.py +82 -0
  36. pynapse/retriever/__init__.py +12 -0
  37. pynapse/retriever/async_chain.py +227 -0
  38. pynapse/retriever/chain.py +209 -0
  39. pynapse/session/__init__.py +12 -0
  40. pynapse/session/key.py +30 -0
  41. pynapse/session/permissions.py +57 -0
  42. pynapse/session/registry.py +90 -0
  43. pynapse/sp_registry/__init__.py +11 -0
  44. pynapse/sp_registry/capabilities.py +25 -0
  45. pynapse/sp_registry/pdp_capabilities.py +102 -0
  46. pynapse/sp_registry/service.py +446 -0
  47. pynapse/sp_registry/types.py +52 -0
  48. pynapse/storage/__init__.py +57 -0
  49. pynapse/storage/async_context.py +682 -0
  50. pynapse/storage/async_manager.py +757 -0
  51. pynapse/storage/context.py +680 -0
  52. pynapse/storage/manager.py +758 -0
  53. pynapse/synapse.py +191 -0
  54. pynapse/utils/__init__.py +25 -0
  55. pynapse/utils/constants.py +25 -0
  56. pynapse/utils/errors.py +3 -0
  57. pynapse/utils/metadata.py +35 -0
  58. pynapse/utils/piece_url.py +16 -0
  59. pynapse/warm_storage/__init__.py +13 -0
  60. pynapse/warm_storage/service.py +513 -0
  61. synapse_filecoin_sdk-0.1.0.dist-info/METADATA +74 -0
  62. synapse_filecoin_sdk-0.1.0.dist-info/RECORD +64 -0
  63. synapse_filecoin_sdk-0.1.0.dist-info/WHEEL +4 -0
  64. synapse_filecoin_sdk-0.1.0.dist-info/licenses/LICENSE.md +228 -0
@@ -0,0 +1,757 @@
1
+ """
2
+ AsyncStorageManager - Central async facade for storage operations.
3
+
4
+ Manages storage contexts (SP + DataSet pairs) with intelligent provider selection
5
+ and dataset reuse. Fully async for Python async/await patterns.
6
+ """
7
+ from __future__ import annotations
8
+
9
+ import asyncio
10
+ from dataclasses import dataclass, field
11
+ from typing import Callable, Dict, List, Optional, Sequence, Awaitable, TYPE_CHECKING
12
+
13
+ from .async_context import AsyncStorageContext, AsyncStorageContextOptions, AsyncUploadResult
14
+
15
+ if TYPE_CHECKING:
16
+ from pynapse.retriever import AsyncChainRetriever
17
+ from pynapse.sp_registry import AsyncSPRegistryService
18
+ from pynapse.warm_storage import AsyncWarmStorageService
19
+
20
+
21
# Size and time constants matching TypeScript SDK
TIB = 1024 ** 4  # bytes in one tebibyte; pricing below is quoted per TiB
EPOCHS_PER_DAY = 2880  # Filecoin epochs per day (one epoch every 30 seconds)
DAYS_PER_MONTH = 30  # billing-month length used for pricing conversions
25
+
26
+
27
@dataclass
class AsyncProviderFilter:
    """Filter criteria for provider selection.

    NOTE(review): only ``provider_ids`` and ``exclude_provider_ids`` are
    consulted by ``AsyncStorageManager.select_providers`` in this module; the
    capability fields appear to be reserved for future filtering — confirm.
    """
    provider_ids: Optional[List[int]] = None  # restrict selection to exactly these IDs
    with_cdn: bool = False  # require CDN capability (not applied here yet)
    with_ipni: bool = False  # require IPNI indexing (not applied here yet)
    min_piece_size: Optional[int] = None  # minimum piece size, bytes (not applied here yet)
    max_piece_size: Optional[int] = None  # maximum piece size, bytes (not applied here yet)
    location: Optional[str] = None  # preferred provider location (not applied here yet)
    exclude_provider_ids: Optional[List[int]] = None  # never select these IDs
37
+
38
+
39
@dataclass
class AsyncPreflightInfo:
    """Preflight estimation for storage costs (returned by ``preflight``)."""
    size_bytes: int  # payload size the estimate was computed for
    estimated_cost_per_epoch: int  # per-epoch cost (units as reported by warm-storage pricing)
    estimated_total_cost: int  # per-epoch cost multiplied by duration_epochs
    duration_epochs: int  # storage duration used for the total
    provider_count: int  # number of providers actually selected
    providers: List[int] = field(default_factory=list)  # selected provider IDs
48
+
49
+
50
@dataclass
class AsyncDataSetMatch:
    """A dataset that matches search criteria (see ``find_dataset``)."""
    data_set_id: int  # on-chain dataset ID
    client_data_set_id: int  # client-scoped dataset ID
    provider_id: int  # provider hosting the dataset
    pdp_endpoint: str  # PDP server endpoint for the provider
    metadata: Dict[str, str]  # dataset metadata key/value pairs
58
+
59
+
60
@dataclass
class AsyncStoragePricing:
    """Pricing information per time unit, quoted per TiB stored."""
    per_tib_per_month: int  # raw monthly rate from the warm-storage contract
    per_tib_per_day: int  # monthly rate // DAYS_PER_MONTH
    per_tib_per_epoch: int  # monthly rate // epochs_per_month
66
+
67
+
68
@dataclass
class AsyncServiceParameters:
    """Service configuration parameters."""
    epochs_per_month: int  # epochs per billing month (from pricing data when available)
    epochs_per_day: int = EPOCHS_PER_DAY  # 2880 epochs/day at 30s epochs
    epoch_duration: int = 30  # seconds per epoch
    min_upload_size: int = 256  # bytes; smallest accepted upload
    max_upload_size: int = 254 * 1024 * 1024  # 254 MiB upload ceiling
76
+
77
+
78
@dataclass
class AsyncStorageInfo:
    """Comprehensive storage service information (see ``get_storage_info``)."""
    pricing_no_cdn: AsyncStoragePricing  # rates without CDN
    pricing_with_cdn: AsyncStoragePricing  # rates with CDN enabled
    token_address: str  # payment token contract address ("" if unavailable)
    token_symbol: str  # payment token symbol
    providers: List[dict]  # provider info dicts for active approved providers
    service_parameters: AsyncServiceParameters  # epoch/size configuration
    approved_provider_ids: List[int] = field(default_factory=list)  # IDs approved by warm storage
88
+
89
+
90
+ class AsyncStorageManager:
91
+ """
92
+ Central async storage manager with provider selection and dataset reuse.
93
+
94
+ Features:
95
+ - Smart provider selection by capabilities (CDN, IPNI, location)
96
+ - Dataset reuse based on metadata matching
97
+ - Multi-provider uploads for redundancy
98
+ - Preflight cost estimation
99
+
100
+ Example:
101
+ # Simple upload (auto-selects provider)
102
+ result = await manager.upload(data)
103
+
104
+ # Upload with specific provider
105
+ result = await manager.upload(data, provider_id=1)
106
+
107
+ # Multi-provider upload for redundancy
108
+ results = await manager.upload_multi(data, provider_count=3)
109
+
110
+ # Preflight check
111
+ info = await manager.preflight(len(data), provider_count=2)
112
+ """
113
+
114
    def __init__(
        self,
        chain,
        private_key: str,
        sp_registry: Optional["AsyncSPRegistryService"] = None,
        warm_storage: Optional["AsyncWarmStorageService"] = None,
        retriever: Optional["AsyncChainRetriever"] = None,
    ) -> None:
        """Initialize the manager.

        Args:
            chain: Chain/EVM handle passed through to contexts it creates.
            private_key: Hex private key used for signing and, via
                eth_account, for deriving the client address.
            sp_registry: Provider registry; required for smart provider
                selection and ``get_storage_info``.
            warm_storage: Warm-storage service; required for smart context
                creation, pricing, and dataset queries.
            retriever: Optional chain retriever enabling SP-agnostic downloads.
        """
        self._chain = chain
        self._private_key = private_key
        self._sp_registry = sp_registry
        self._warm_storage = warm_storage
        self._retriever = retriever
        # Context reused when callers pass no explicit provider/dataset.
        self._default_context: Optional[AsyncStorageContext] = None
        # provider_id -> context created via create_context()
        self._context_cache: Dict[int, AsyncStorageContext] = {}
129
+
130
+ def create_context(
131
+ self,
132
+ pdp_endpoint: str,
133
+ data_set_id: int,
134
+ client_data_set_id: int,
135
+ provider_id: Optional[int] = None,
136
+ ) -> AsyncStorageContext:
137
+ """Create a storage context for a specific provider/dataset (low-level)."""
138
+ context = AsyncStorageContext(
139
+ pdp_endpoint=pdp_endpoint,
140
+ chain=self._chain,
141
+ private_key=self._private_key,
142
+ data_set_id=data_set_id,
143
+ client_data_set_id=client_data_set_id,
144
+ )
145
+ if provider_id is not None:
146
+ self._context_cache[provider_id] = context
147
+ return context
148
+
149
+ async def get_context(
150
+ self,
151
+ provider_id: Optional[int] = None,
152
+ provider_address: Optional[str] = None,
153
+ data_set_id: Optional[int] = None,
154
+ with_cdn: bool = False,
155
+ force_create_data_set: bool = False,
156
+ metadata: Optional[Dict[str, str]] = None,
157
+ exclude_provider_ids: Optional[List[int]] = None,
158
+ on_provider_selected: Optional[Callable] = None,
159
+ on_data_set_resolved: Optional[Callable] = None,
160
+ ) -> AsyncStorageContext:
161
+ """
162
+ Get or create an async storage context with smart provider/dataset selection.
163
+
164
+ This is the recommended way to get a context - it handles provider
165
+ selection, dataset reuse, and dataset creation automatically.
166
+
167
+ Args:
168
+ provider_id: Optional specific provider ID to use
169
+ provider_address: Optional specific provider address to use
170
+ data_set_id: Optional specific dataset ID to use
171
+ with_cdn: Whether to enable CDN services
172
+ force_create_data_set: Force creation of new dataset
173
+ metadata: Custom metadata for the dataset
174
+ exclude_provider_ids: Provider IDs to exclude from selection
175
+ on_provider_selected: Callback when provider is selected
176
+ on_data_set_resolved: Callback when dataset is resolved
177
+
178
+ Returns:
179
+ Configured AsyncStorageContext
180
+ """
181
+ if self._warm_storage is None:
182
+ raise ValueError("warm_storage required for smart context creation")
183
+ if self._sp_registry is None:
184
+ raise ValueError("sp_registry required for smart context creation")
185
+
186
+ # Check if we can reuse the default context
187
+ can_use_default = (
188
+ provider_id is None
189
+ and provider_address is None
190
+ and data_set_id is None
191
+ and not force_create_data_set
192
+ and self._default_context is not None
193
+ )
194
+
195
+ if can_use_default:
196
+ # Check if metadata matches
197
+ from pynapse.utils.metadata import combine_metadata, metadata_matches
198
+ requested_metadata = combine_metadata(metadata, with_cdn)
199
+ if metadata_matches(self._default_context.data_set_metadata, requested_metadata):
200
+ return self._default_context
201
+
202
+ # Create new context using factory method
203
+ options = AsyncStorageContextOptions(
204
+ provider_id=provider_id,
205
+ provider_address=provider_address,
206
+ data_set_id=data_set_id,
207
+ with_cdn=with_cdn,
208
+ force_create_data_set=force_create_data_set,
209
+ metadata=metadata,
210
+ exclude_provider_ids=exclude_provider_ids,
211
+ on_provider_selected=on_provider_selected,
212
+ on_data_set_resolved=on_data_set_resolved,
213
+ )
214
+
215
+ context = await AsyncStorageContext.create(
216
+ chain=self._chain,
217
+ private_key=self._private_key,
218
+ warm_storage=self._warm_storage,
219
+ sp_registry=self._sp_registry,
220
+ options=options,
221
+ )
222
+
223
+ # Cache as default if no specific options were provided
224
+ if provider_id is None and provider_address is None and data_set_id is None:
225
+ self._default_context = context
226
+
227
+ return context
228
+
229
+ async def get_contexts(
230
+ self,
231
+ count: int = 2,
232
+ with_cdn: bool = False,
233
+ force_create_data_set: bool = False,
234
+ metadata: Optional[Dict[str, str]] = None,
235
+ exclude_provider_ids: Optional[List[int]] = None,
236
+ on_provider_selected: Optional[Callable] = None,
237
+ on_data_set_resolved: Optional[Callable] = None,
238
+ ) -> List[AsyncStorageContext]:
239
+ """
240
+ Get or create multiple async storage contexts for multi-provider redundancy.
241
+
242
+ Args:
243
+ count: Number of contexts to create (default: 2)
244
+ with_cdn: Whether to enable CDN services
245
+ force_create_data_set: Force creation of new datasets
246
+ metadata: Custom metadata for datasets
247
+ exclude_provider_ids: Provider IDs to exclude from selection
248
+ on_provider_selected: Callback when provider is selected
249
+ on_data_set_resolved: Callback when dataset is resolved
250
+
251
+ Returns:
252
+ List of configured AsyncStorageContext instances
253
+ """
254
+ if self._warm_storage is None:
255
+ raise ValueError("warm_storage required for smart context creation")
256
+ if self._sp_registry is None:
257
+ raise ValueError("sp_registry required for smart context creation")
258
+
259
+ options = AsyncStorageContextOptions(
260
+ with_cdn=with_cdn,
261
+ force_create_data_set=force_create_data_set,
262
+ metadata=metadata,
263
+ exclude_provider_ids=exclude_provider_ids,
264
+ on_provider_selected=on_provider_selected,
265
+ on_data_set_resolved=on_data_set_resolved,
266
+ )
267
+
268
+ return await AsyncStorageContext.create_contexts(
269
+ chain=self._chain,
270
+ private_key=self._private_key,
271
+ warm_storage=self._warm_storage,
272
+ sp_registry=self._sp_registry,
273
+ count=count,
274
+ options=options,
275
+ )
276
+
277
+ async def get_default_context(self) -> AsyncStorageContext:
278
+ """
279
+ Get the default async storage context, creating one if needed.
280
+
281
+ Returns:
282
+ The default AsyncStorageContext
283
+ """
284
+ if self._default_context is None:
285
+ self._default_context = await self.get_context()
286
+ return self._default_context
287
+
288
+ async def select_providers(
289
+ self,
290
+ count: int = 1,
291
+ filter: Optional[AsyncProviderFilter] = None,
292
+ ) -> List[int]:
293
+ """
294
+ Select providers matching the given criteria.
295
+
296
+ Args:
297
+ count: Number of providers to select
298
+ filter: Optional filter criteria
299
+
300
+ Returns:
301
+ List of provider IDs
302
+ """
303
+ if self._sp_registry is None:
304
+ raise ValueError("sp_registry required for provider selection")
305
+
306
+ filter = filter or AsyncProviderFilter()
307
+
308
+ # If specific provider IDs requested, validate and return them
309
+ if filter.provider_ids:
310
+ return filter.provider_ids[:count]
311
+
312
+ # Get all active providers
313
+ providers = await self._sp_registry.get_all_active_providers()
314
+
315
+ # Filter by exclusions
316
+ if filter.exclude_provider_ids:
317
+ providers = [p for p in providers if p.provider_id not in filter.exclude_provider_ids]
318
+
319
+ selected = [p.provider_id for p in providers[:count]]
320
+ return selected
321
+
322
+ async def find_dataset(
323
+ self,
324
+ provider_id: int,
325
+ metadata: Optional[Dict[str, str]] = None,
326
+ ) -> Optional[AsyncDataSetMatch]:
327
+ """
328
+ Find an existing dataset for the given provider matching metadata.
329
+
330
+ Args:
331
+ provider_id: Provider to search datasets for
332
+ metadata: Metadata criteria to match
333
+
334
+ Returns:
335
+ Matching dataset info or None
336
+ """
337
+ if self._warm_storage is None:
338
+ return None
339
+
340
+ # TODO: Implement dataset search by provider and metadata
341
+ return None
342
+
343
+ async def preflight(
344
+ self,
345
+ size_bytes: int,
346
+ provider_count: int = 1,
347
+ duration_epochs: int = 2880, # ~1 day default
348
+ filter: Optional[AsyncProviderFilter] = None,
349
+ with_cdn: bool = False,
350
+ ) -> AsyncPreflightInfo:
351
+ """
352
+ Estimate storage costs before upload.
353
+
354
+ Args:
355
+ size_bytes: Size of data to upload
356
+ provider_count: Number of providers for redundancy
357
+ duration_epochs: Storage duration in epochs
358
+ filter: Optional provider filter criteria
359
+ with_cdn: Whether to include CDN in cost estimation
360
+
361
+ Returns:
362
+ Preflight estimation including costs
363
+ """
364
+ # Select providers
365
+ providers = await self.select_providers(count=provider_count, filter=filter)
366
+
367
+ # Try to get actual pricing from warm storage
368
+ if self._warm_storage is not None:
369
+ try:
370
+ pricing_rates = await self._warm_storage.get_current_pricing_rates()
371
+ if isinstance(pricing_rates, (list, tuple)) and len(pricing_rates) >= 3:
372
+ price_per_tib_month = int(pricing_rates[1] if with_cdn else pricing_rates[0])
373
+ epochs_per_month = int(pricing_rates[2])
374
+
375
+ # Calculate rate per epoch for this size
376
+ price_per_tib_epoch = price_per_tib_month // epochs_per_month if epochs_per_month else 0
377
+ estimated_rate = (size_bytes * price_per_tib_epoch * provider_count) // TIB
378
+ estimated_rate = max(1, estimated_rate) # minimum 1 unit
379
+ estimated_total = estimated_rate * duration_epochs
380
+
381
+ return AsyncPreflightInfo(
382
+ size_bytes=size_bytes,
383
+ estimated_cost_per_epoch=estimated_rate,
384
+ estimated_total_cost=estimated_total,
385
+ duration_epochs=duration_epochs,
386
+ provider_count=len(providers),
387
+ providers=providers,
388
+ )
389
+ except Exception:
390
+ pass
391
+
392
+ # Fallback: simplified cost calculation
393
+ estimated_rate = size_bytes * provider_count // TIB + 1 # minimum 1 unit
394
+ estimated_total = estimated_rate * duration_epochs
395
+
396
+ return AsyncPreflightInfo(
397
+ size_bytes=size_bytes,
398
+ estimated_cost_per_epoch=estimated_rate,
399
+ estimated_total_cost=estimated_total,
400
+ duration_epochs=duration_epochs,
401
+ provider_count=len(providers),
402
+ providers=providers,
403
+ )
404
+
405
+ async def preflight_upload(
406
+ self,
407
+ size_bytes: int,
408
+ with_cdn: bool = False,
409
+ payments_service=None,
410
+ ) -> dict:
411
+ """
412
+ Comprehensive preflight check including cost estimation and allowance validation.
413
+
414
+ This method checks:
415
+ 1. Storage costs per epoch/day/month
416
+ 2. Current service allowances (if payments_service provided)
417
+ 3. Whether allowances are sufficient
418
+
419
+ Args:
420
+ size_bytes: Size of data to upload in bytes
421
+ with_cdn: Whether CDN is enabled
422
+ payments_service: Optional AsyncPaymentsService for allowance checking
423
+
424
+ Returns:
425
+ Dict with estimated_cost, allowance_check, and any required actions
426
+ """
427
+ result = {
428
+ "estimated_cost": {
429
+ "per_epoch": 0,
430
+ "per_day": 0,
431
+ "per_month": 0,
432
+ },
433
+ "allowance_check": {
434
+ "sufficient": True,
435
+ "message": None,
436
+ },
437
+ "size_bytes": size_bytes,
438
+ "with_cdn": with_cdn,
439
+ }
440
+
441
+ # Get pricing
442
+ if self._warm_storage is not None:
443
+ try:
444
+ pricing_rates = await self._warm_storage.get_current_pricing_rates()
445
+ if isinstance(pricing_rates, (list, tuple)) and len(pricing_rates) >= 3:
446
+ price_per_tib_month = int(pricing_rates[1] if with_cdn else pricing_rates[0])
447
+ epochs_per_month = int(pricing_rates[2])
448
+
449
+ # Calculate costs
450
+ size_ratio = size_bytes / TIB
451
+ cost_per_month = int(price_per_tib_month * size_ratio)
452
+ cost_per_day = cost_per_month // DAYS_PER_MONTH
453
+ cost_per_epoch = cost_per_month // epochs_per_month if epochs_per_month else 0
454
+
455
+ result["estimated_cost"] = {
456
+ "per_epoch": cost_per_epoch,
457
+ "per_day": cost_per_day,
458
+ "per_month": cost_per_month,
459
+ }
460
+ except Exception:
461
+ pass
462
+
463
+ # Check allowances if payments service provided
464
+ if payments_service is not None and self._chain is not None:
465
+ try:
466
+ approval = await payments_service.service_approval(
467
+ self._chain.contracts.warm_storage
468
+ )
469
+
470
+ rate_needed = result["estimated_cost"]["per_epoch"]
471
+ # Lockup = rate * lockup_period (typically 10 days)
472
+ lockup_period = EPOCHS_PER_DAY * 10
473
+ lockup_needed = rate_needed * lockup_period
474
+
475
+ rate_sufficient = approval.rate_allowance >= rate_needed
476
+ lockup_sufficient = approval.lockup_allowance >= lockup_needed
477
+
478
+ result["allowance_check"] = {
479
+ "sufficient": rate_sufficient and lockup_sufficient,
480
+ "is_approved": approval.is_approved,
481
+ "rate_allowance": approval.rate_allowance,
482
+ "lockup_allowance": approval.lockup_allowance,
483
+ "rate_needed": rate_needed,
484
+ "lockup_needed": lockup_needed,
485
+ "message": None if (rate_sufficient and lockup_sufficient) else (
486
+ f"Insufficient allowances: need rate={rate_needed}, lockup={lockup_needed}"
487
+ ),
488
+ }
489
+ except Exception as e:
490
+ result["allowance_check"]["message"] = f"Failed to check allowances: {e}"
491
+
492
+ return result
493
+
494
+ async def upload(
495
+ self,
496
+ data: bytes,
497
+ pdp_endpoint: Optional[str] = None,
498
+ data_set_id: Optional[int] = None,
499
+ client_data_set_id: Optional[int] = None,
500
+ provider_id: Optional[int] = None,
501
+ metadata: Optional[Dict[str, str]] = None,
502
+ context: Optional[AsyncStorageContext] = None,
503
+ with_cdn: bool = False,
504
+ auto_create_context: bool = True,
505
+ ) -> AsyncUploadResult:
506
+ """
507
+ Upload data to storage asynchronously.
508
+
509
+ If warm_storage and sp_registry are configured, this method can
510
+ auto-create a context with smart provider selection. Otherwise,
511
+ explicit context parameters are required.
512
+
513
+ Args:
514
+ data: Bytes to upload
515
+ pdp_endpoint: PDP server endpoint (required if no auto-create)
516
+ data_set_id: Dataset ID (required if no auto-create)
517
+ client_data_set_id: Client dataset ID (required if no auto-create)
518
+ provider_id: Optional provider ID for selection/caching
519
+ metadata: Optional piece metadata
520
+ context: Explicit context to use (overrides other params)
521
+ with_cdn: Enable CDN services (for auto-create)
522
+ auto_create_context: Auto-create context if services available (default: True)
523
+
524
+ Returns:
525
+ Upload result with piece CID and tx hash
526
+ """
527
+ if context is not None:
528
+ return await context.upload(data, metadata=metadata)
529
+
530
+ # Check for cached context
531
+ if provider_id is not None and provider_id in self._context_cache:
532
+ return await self._context_cache[provider_id].upload(data, metadata=metadata)
533
+
534
+ # Try auto-create if services are available
535
+ if auto_create_context and self._warm_storage is not None and self._sp_registry is not None:
536
+ ctx = await self.get_context(
537
+ provider_id=provider_id,
538
+ with_cdn=with_cdn,
539
+ )
540
+ return await ctx.upload(data, metadata=metadata)
541
+
542
+ # Fall back to explicit context creation
543
+ if pdp_endpoint is None or data_set_id is None or client_data_set_id is None:
544
+ raise ValueError(
545
+ "pdp_endpoint, data_set_id, and client_data_set_id required "
546
+ "(or configure warm_storage and sp_registry for auto-creation)"
547
+ )
548
+
549
+ ctx = self.create_context(pdp_endpoint, data_set_id, client_data_set_id, provider_id)
550
+ return await ctx.upload(data, metadata=metadata)
551
+
552
+ async def upload_multi(
553
+ self,
554
+ data: bytes,
555
+ contexts: Sequence[AsyncStorageContext],
556
+ metadata: Optional[Dict[str, str]] = None,
557
+ ) -> List[AsyncUploadResult]:
558
+ """
559
+ Upload data to multiple storage providers for redundancy.
560
+
561
+ All contexts receive the same data with the same piece CID.
562
+
563
+ Args:
564
+ data: Bytes to upload
565
+ contexts: Storage contexts for each provider
566
+ metadata: Optional piece metadata
567
+
568
+ Returns:
569
+ List of upload results (one per context)
570
+ """
571
+ # Upload concurrently to all contexts
572
+ tasks = [ctx.upload(data, metadata=metadata) for ctx in contexts]
573
+ results = await asyncio.gather(*tasks)
574
+ return list(results)
575
+
576
+ async def download(
577
+ self,
578
+ piece_cid: str,
579
+ pdp_endpoint: Optional[str] = None,
580
+ context: Optional[AsyncStorageContext] = None,
581
+ provider_address: Optional[str] = None,
582
+ ) -> bytes:
583
+ """
584
+ Download data by piece CID asynchronously.
585
+
586
+ If a retriever is configured, this method can perform SP-agnostic
587
+ downloads by querying the client's datasets to find providers.
588
+
589
+ Args:
590
+ piece_cid: The piece CID to download
591
+ pdp_endpoint: PDP endpoint to download from (optional if retriever configured)
592
+ context: Explicit context to use
593
+ provider_address: Optional specific provider address for retriever
594
+
595
+ Returns:
596
+ Downloaded data bytes
597
+ """
598
+ if context is not None:
599
+ return await context.download(piece_cid)
600
+
601
+ # Try SP-agnostic download using retriever
602
+ if self._retriever is not None:
603
+ from eth_account import Account
604
+ acct = Account.from_key(self._private_key)
605
+ return await self._retriever.fetch_piece(
606
+ piece_cid=piece_cid,
607
+ client_address=acct.address,
608
+ provider_address=provider_address,
609
+ )
610
+
611
+ # Fall back to explicit endpoint
612
+ if pdp_endpoint is None:
613
+ raise ValueError(
614
+ "pdp_endpoint required (or configure retriever for SP-agnostic downloads)"
615
+ )
616
+
617
+ ctx = AsyncStorageContext(
618
+ pdp_endpoint=pdp_endpoint,
619
+ chain=self._chain,
620
+ private_key=self._private_key,
621
+ data_set_id=0,
622
+ client_data_set_id=0,
623
+ )
624
+ return await ctx.download(piece_cid)
625
+
626
+ async def find_datasets(self, client_address: Optional[str] = None) -> List[dict]:
627
+ """
628
+ Query datasets for a client with enhanced details.
629
+
630
+ Args:
631
+ client_address: Optional client address. If not provided,
632
+ uses the address derived from the private key.
633
+
634
+ Returns:
635
+ List of enhanced dataset info dictionaries
636
+ """
637
+ if self._warm_storage is None:
638
+ raise ValueError("warm_storage required for find_datasets")
639
+
640
+ if client_address is None:
641
+ from eth_account import Account
642
+ acct = Account.from_key(self._private_key)
643
+ client_address = acct.address
644
+
645
+ datasets = await self._warm_storage.get_client_data_sets_with_details(client_address)
646
+ return [
647
+ {
648
+ "data_set_id": ds.data_set_id,
649
+ "client_data_set_id": ds.client_data_set_id,
650
+ "provider_id": ds.provider_id,
651
+ "service_provider": ds.service_provider,
652
+ "payer": ds.payer,
653
+ "payee": ds.payee,
654
+ "active_piece_count": ds.active_piece_count,
655
+ "is_live": ds.is_live,
656
+ "is_managed": ds.is_managed,
657
+ "with_cdn": ds.with_cdn,
658
+ "metadata": ds.metadata,
659
+ "pdp_end_epoch": ds.pdp_end_epoch,
660
+ }
661
+ for ds in datasets
662
+ ]
663
+
664
+ async def terminate_data_set(self, data_set_id: int) -> str:
665
+ """
666
+ Terminate a dataset. This also removes all pieces in the dataset.
667
+
668
+ Args:
669
+ data_set_id: The ID of the dataset to terminate
670
+
671
+ Returns:
672
+ Transaction hash
673
+ """
674
+ if self._warm_storage is None:
675
+ raise ValueError("warm_storage required for terminate_data_set")
676
+
677
+ from eth_account import Account
678
+ acct = Account.from_key(self._private_key)
679
+ return await self._warm_storage.terminate_data_set(acct.address, data_set_id)
680
+
681
    async def get_storage_info(self) -> AsyncStorageInfo:
        """
        Get comprehensive information about the storage service.

        Returns service pricing, approved providers, contract addresses,
        and configuration parameters.

        Returns:
            AsyncStorageInfo with pricing, providers, and service parameters

        Raises:
            ValueError: If warm_storage or sp_registry is not configured.
        """
        if self._warm_storage is None:
            raise ValueError("warm_storage required for get_storage_info")
        if self._sp_registry is None:
            raise ValueError("sp_registry required for get_storage_info")

        # Get pricing info
        pricing_rates = await self._warm_storage.get_current_pricing_rates()

        # Parse pricing - format may vary, handle common cases.
        # Assumed layout: [no-CDN monthly, with-CDN monthly, epochs/month,
        # token address] — TODO confirm against the warm-storage contract ABI.
        if isinstance(pricing_rates, (list, tuple)) and len(pricing_rates) >= 4:
            price_no_cdn = int(pricing_rates[0])
            price_with_cdn = int(pricing_rates[1])
            epochs_per_month = int(pricing_rates[2])
            token_address = pricing_rates[3]
        else:
            # Fallback to individual calls; zeros signal "pricing unavailable".
            price_no_cdn = 0
            price_with_cdn = 0
            epochs_per_month = EPOCHS_PER_DAY * DAYS_PER_MONTH
            token_address = ""

        # Calculate per-epoch and per-day pricing (guarding zero divisors).
        pricing_no_cdn = AsyncStoragePricing(
            per_tib_per_month=price_no_cdn,
            per_tib_per_day=price_no_cdn // DAYS_PER_MONTH if price_no_cdn else 0,
            per_tib_per_epoch=price_no_cdn // epochs_per_month if price_no_cdn and epochs_per_month else 0,
        )
        pricing_with_cdn = AsyncStoragePricing(
            per_tib_per_month=price_with_cdn,
            per_tib_per_day=price_with_cdn // DAYS_PER_MONTH if price_with_cdn else 0,
            per_tib_per_epoch=price_with_cdn // epochs_per_month if price_with_cdn and epochs_per_month else 0,
        )

        # Get approved provider IDs (best effort — empty list on failure).
        try:
            approved_ids = await self._warm_storage.get_approved_provider_ids()
        except Exception:
            approved_ids = []

        # Get provider details, skipping inactive providers and lookup failures.
        providers = []
        for pid in approved_ids:
            try:
                provider = await self._sp_registry.get_provider(pid)
                if provider and provider.is_active:
                    providers.append({
                        "provider_id": provider.provider_id,
                        "service_provider": provider.service_provider,
                        "payee": provider.payee,
                        "name": provider.name,
                        "description": provider.description,
                        "is_active": provider.is_active,
                    })
            except Exception:
                continue

        return AsyncStorageInfo(
            pricing_no_cdn=pricing_no_cdn,
            pricing_with_cdn=pricing_with_cdn,
            token_address=str(token_address),
            token_symbol="USDFC",  # Standard token for Filecoin storage
            providers=providers,
            service_parameters=AsyncServiceParameters(
                epochs_per_month=epochs_per_month,
            ),
            approved_provider_ids=approved_ids,
        )