synapse-filecoin-sdk 0.1.0 (synapse_filecoin_sdk-0.1.0-py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. pynapse/__init__.py +6 -0
  2. pynapse/_version.py +1 -0
  3. pynapse/contracts/__init__.py +34 -0
  4. pynapse/contracts/abi_registry.py +11 -0
  5. pynapse/contracts/addresses.json +30 -0
  6. pynapse/contracts/erc20_abi.json +92 -0
  7. pynapse/contracts/errorsAbi.json +933 -0
  8. pynapse/contracts/filecoinPayV1Abi.json +2424 -0
  9. pynapse/contracts/filecoinWarmStorageServiceAbi.json +2363 -0
  10. pynapse/contracts/filecoinWarmStorageServiceStateViewAbi.json +651 -0
  11. pynapse/contracts/generated.py +35 -0
  12. pynapse/contracts/payments_abi.json +205 -0
  13. pynapse/contracts/pdpVerifierAbi.json +1266 -0
  14. pynapse/contracts/providerIdSetAbi.json +161 -0
  15. pynapse/contracts/serviceProviderRegistryAbi.json +1479 -0
  16. pynapse/contracts/sessionKeyRegistryAbi.json +147 -0
  17. pynapse/core/__init__.py +68 -0
  18. pynapse/core/abis.py +25 -0
  19. pynapse/core/chains.py +97 -0
  20. pynapse/core/constants.py +27 -0
  21. pynapse/core/errors.py +22 -0
  22. pynapse/core/piece.py +263 -0
  23. pynapse/core/rand.py +14 -0
  24. pynapse/core/typed_data.py +320 -0
  25. pynapse/core/utils.py +30 -0
  26. pynapse/evm/__init__.py +3 -0
  27. pynapse/evm/client.py +26 -0
  28. pynapse/filbeam/__init__.py +3 -0
  29. pynapse/filbeam/service.py +39 -0
  30. pynapse/payments/__init__.py +17 -0
  31. pynapse/payments/service.py +826 -0
  32. pynapse/pdp/__init__.py +21 -0
  33. pynapse/pdp/server.py +331 -0
  34. pynapse/pdp/types.py +38 -0
  35. pynapse/pdp/verifier.py +82 -0
  36. pynapse/retriever/__init__.py +12 -0
  37. pynapse/retriever/async_chain.py +227 -0
  38. pynapse/retriever/chain.py +209 -0
  39. pynapse/session/__init__.py +12 -0
  40. pynapse/session/key.py +30 -0
  41. pynapse/session/permissions.py +57 -0
  42. pynapse/session/registry.py +90 -0
  43. pynapse/sp_registry/__init__.py +11 -0
  44. pynapse/sp_registry/capabilities.py +25 -0
  45. pynapse/sp_registry/pdp_capabilities.py +102 -0
  46. pynapse/sp_registry/service.py +446 -0
  47. pynapse/sp_registry/types.py +52 -0
  48. pynapse/storage/__init__.py +57 -0
  49. pynapse/storage/async_context.py +682 -0
  50. pynapse/storage/async_manager.py +757 -0
  51. pynapse/storage/context.py +680 -0
  52. pynapse/storage/manager.py +758 -0
  53. pynapse/synapse.py +191 -0
  54. pynapse/utils/__init__.py +25 -0
  55. pynapse/utils/constants.py +25 -0
  56. pynapse/utils/errors.py +3 -0
  57. pynapse/utils/metadata.py +35 -0
  58. pynapse/utils/piece_url.py +16 -0
  59. pynapse/warm_storage/__init__.py +13 -0
  60. pynapse/warm_storage/service.py +513 -0
  61. synapse_filecoin_sdk-0.1.0.dist-info/METADATA +74 -0
  62. synapse_filecoin_sdk-0.1.0.dist-info/RECORD +64 -0
  63. synapse_filecoin_sdk-0.1.0.dist-info/WHEEL +4 -0
  64. synapse_filecoin_sdk-0.1.0.dist-info/licenses/LICENSE.md +228 -0
pynapse/warm_storage/service.py
@@ -0,0 +1,513 @@
+ from __future__ import annotations
+
+ from dataclasses import dataclass, field
+ from typing import Any, Dict, List, Optional
+
+ from eth_account import Account
+ from web3 import AsyncWeb3, Web3
+
+ from pynapse.contracts import FWSS_ABI, FWSS_VIEW_ABI
+ from pynapse.core.chains import Chain
+
+
+ @dataclass
+ class DataSetInfo:
+     pdp_rail_id: int
+     cache_miss_rail_id: int
+     cdn_rail_id: int
+     payer: str
+     payee: str
+     service_provider: str
+     commission_bps: int
+     client_data_set_id: int
+     pdp_end_epoch: int
+     provider_id: int
+     data_set_id: int
+
+
+ @dataclass
+ class EnhancedDataSetInfo(DataSetInfo):
+     """Extended dataset info with live status, management info, and metadata."""
+     active_piece_count: int = 0
+     is_live: bool = False
+     is_managed: bool = False
+     with_cdn: bool = False
+     metadata: Dict[str, str] = field(default_factory=dict)
+
+
+ class SyncWarmStorageService:
+     def __init__(self, web3: Web3, chain: Chain, private_key: Optional[str] = None) -> None:
+         self._web3 = web3
+         self._chain = chain
+         self._private_key = private_key
+         self._fwss = web3.eth.contract(address=chain.contracts.warm_storage, abi=FWSS_ABI)
+         self._view = web3.eth.contract(address=chain.contracts.warm_storage_state_view, abi=FWSS_VIEW_ABI)
+
+     def get_data_set(self, data_set_id: int) -> DataSetInfo:
+         info = self._view.functions.getDataSet(data_set_id).call()
+         if int(info[0]) == 0:
+             raise ValueError(f"Data set {data_set_id} does not exist")
+         return DataSetInfo(
+             pdp_rail_id=int(info[0]),
+             cache_miss_rail_id=int(info[1]),
+             cdn_rail_id=int(info[2]),
+             payer=info[3],
+             payee=info[4],
+             service_provider=info[5],
+             commission_bps=int(info[6]),
+             client_data_set_id=int(info[7]),
+             pdp_end_epoch=int(info[8]),
+             provider_id=int(info[9]),
+             data_set_id=int(info[10]),
+         )
+
+     def get_client_data_sets(self, client_address: str) -> List[DataSetInfo]:
+         data_sets = self._view.functions.getClientDataSets(client_address).call()
+         return [self.get_data_set(int(ds[10])) for ds in data_sets]
+
+     def get_all_data_set_metadata(self, data_set_id: int) -> Dict[str, str]:
+         entries = self._view.functions.getAllDataSetMetadata(data_set_id).call()
+         return {key: value for key, value in entries}
+
+     def get_data_set_metadata(self, data_set_id: int, key: str) -> Optional[str]:
+         exists, value = self._view.functions.getDataSetMetadata(data_set_id, key).call()
+         return value if exists else None
+
+     def get_all_piece_metadata(self, data_set_id: int) -> List[Dict[str, str]]:
+         entries = self._view.functions.getAllPieceMetadata(data_set_id).call()
+         return [dict(entry) for entry in entries]
+
+     def get_piece_metadata(self, data_set_id: int, piece_id: int, key: str) -> Optional[str]:
+         exists, value = self._view.functions.getPieceMetadata(data_set_id, piece_id, key).call()
+         return value if exists else None
+
+     def get_data_set_status(self, data_set_id: int) -> int:
+         return int(self._view.functions.getDataSetStatus(data_set_id).call())
+
+     def get_data_set_size_in_bytes(self, leaf_count: int) -> int:
+         return int(self._view.functions.getDataSetSizeInBytes(leaf_count).call())
+
+     def get_pdp_config(self):
+         return self._view.functions.getPDPConfig().call()
+
+     def get_service_price(self, provider_id: int, token: str) -> int:
+         return int(self._fwss.functions.getServicePrice(provider_id, token).call())
+
+     def get_effective_rates(self):
+         return self._fwss.functions.getEffectiveRates().call()
+
+     def calculate_rate_per_epoch(self, total_bytes: int) -> int:
+         return int(self._fwss.functions.calculateRatePerEpoch(total_bytes).call())
+
+     def get_proving_period_for_epoch(self, data_set_id: int, epoch: int) -> int:
+         return int(self._fwss.functions.getProvingPeriodForEpoch(data_set_id, epoch).call())
+
+     def get_current_pricing_rates(self):
+         return self._view.functions.getCurrentPricingRates().call()
+
+     def next_pdp_challenge_window_start(self, data_set_id: int) -> int:
+         return int(self._view.functions.nextPDPChallengeWindowStart(data_set_id).call())
+
+     def proving_deadline(self, data_set_id: int) -> int:
+         return int(self._view.functions.provingDeadline(data_set_id).call())
+
+     def get_approved_providers(self, offset: int = 0, limit: int = 0) -> List[int]:
+         """
+         Get approved provider IDs with optional pagination.
+
+         Args:
+             offset: Starting index (0-based). Use 0 to start from beginning.
+             limit: Maximum number of providers to return. Use 0 to get all remaining providers.
+
+         Returns:
+             List of approved provider IDs.
+         """
+         providers = self._view.functions.getApprovedProviders(offset, limit).call()
+         return [int(pid) for pid in providers]
+
+     def get_approved_providers_length(self) -> int:
+         """Get the total count of approved providers."""
+         return int(self._view.functions.getApprovedProvidersLength().call())
+
+     def is_provider_approved(self, provider_id: int) -> bool:
+         """Check if a provider is approved for the warm storage service."""
+         return bool(self._view.functions.isProviderApproved(provider_id).call())
+
+     def add_approved_provider(self, account: str, provider_id: int) -> str:
+         if not self._private_key:
+             raise ValueError("private_key required")
+         txn = self._fwss.functions.addApprovedProvider(provider_id).build_transaction(
+             {
+                 "from": account,
+                 "nonce": self._web3.eth.get_transaction_count(account),
+             }
+         )
+         signed = self._web3.eth.account.sign_transaction(txn, private_key=self._private_key)
+         tx_hash = self._web3.eth.send_raw_transaction(signed.rawTransaction)
+         return tx_hash.hex()
+
+     def remove_approved_provider(self, account: str, provider_id: int) -> str:
+         if not self._private_key:
+             raise ValueError("private_key required")
+         txn = self._fwss.functions.removeApprovedProvider(provider_id).build_transaction(
+             {
+                 "from": account,
+                 "nonce": self._web3.eth.get_transaction_count(account),
+             }
+         )
+         signed = self._web3.eth.account.sign_transaction(txn, private_key=self._private_key)
+         tx_hash = self._web3.eth.send_raw_transaction(signed.rawTransaction)
+         return tx_hash.hex()
+
+     def get_approved_provider_ids(self) -> List[int]:
+         """Get list of all approved provider IDs for the warm storage service."""
+         # Use the view contract's getApprovedProviders with offset=0, limit=0 to get all
+         return self.get_approved_providers(offset=0, limit=0)
+
+     def get_active_piece_count(self, data_set_id: int) -> int:
+         """Get count of active pieces in a dataset (excludes removed pieces)."""
+         from pynapse.pdp import SyncPDPVerifier
+         verifier = SyncPDPVerifier(self._web3, self._chain)
+         return verifier.get_active_piece_count(data_set_id)
+
+     def data_set_live(self, data_set_id: int) -> bool:
+         """Check if a dataset is live."""
+         from pynapse.pdp import SyncPDPVerifier
+         verifier = SyncPDPVerifier(self._web3, self._chain)
+         return verifier.data_set_live(data_set_id)
+
+     def get_data_set_listener(self, data_set_id: int) -> str:
+         """Get the listener address for a dataset."""
+         from pynapse.pdp import SyncPDPVerifier
+         verifier = SyncPDPVerifier(self._web3, self._chain)
+         return verifier.get_data_set_listener(data_set_id)
+
+     def validate_data_set(self, data_set_id: int) -> None:
+         """
+         Validate that a dataset is live and managed by this WarmStorage contract.
+
+         Raises:
+             ValueError: If dataset is not live or not managed by this contract.
+         """
+         if not self.data_set_live(data_set_id):
+             raise ValueError(f"Data set {data_set_id} does not exist or is not live")
+
+         listener = self.get_data_set_listener(data_set_id)
+         if listener.lower() != self._chain.contracts.warm_storage.lower():
+             raise ValueError(
+                 f"Data set {data_set_id} is not managed by this WarmStorage contract "
+                 f"({self._chain.contracts.warm_storage}), managed by {listener}"
+             )
+
+     def terminate_data_set(self, account: str, data_set_id: int) -> str:
+         """
+         Terminate a dataset. This also removes all pieces in the dataset.
+
+         Args:
+             account: The account address to send from
+             data_set_id: The ID of the dataset to terminate
+
+         Returns:
+             Transaction hash
+         """
+         if not self._private_key:
+             raise ValueError("private_key required")
+         txn = self._fwss.functions.terminateDataSet(data_set_id).build_transaction(
+             {
+                 "from": account,
+                 "nonce": self._web3.eth.get_transaction_count(account),
+             }
+         )
+         signed = self._web3.eth.account.sign_transaction(txn, private_key=self._private_key)
+         tx_hash = self._web3.eth.send_raw_transaction(signed.rawTransaction)
+         return tx_hash.hex()
+
+     def get_client_data_sets_with_details(self, client_address: str) -> List[EnhancedDataSetInfo]:
+         """
+         Get all datasets for a client with enhanced details.
+
+         Includes live status, management info, metadata, and piece counts.
+
+         Args:
+             client_address: The client address to query
+
+         Returns:
+             List of enhanced dataset info
+         """
+         from pynapse.pdp import SyncPDPVerifier
+         verifier = SyncPDPVerifier(self._web3, self._chain)
+
+         data_sets = self.get_client_data_sets(client_address)
+         enhanced: List[EnhancedDataSetInfo] = []
+
+         for ds in data_sets:
+             try:
+                 is_live = verifier.data_set_live(ds.data_set_id)
+                 listener = verifier.get_data_set_listener(ds.data_set_id) if is_live else ""
+                 is_managed = listener.lower() == self._chain.contracts.warm_storage.lower() if listener else False
+                 metadata = self.get_all_data_set_metadata(ds.data_set_id) if is_live else {}
+                 active_piece_count = verifier.get_active_piece_count(ds.data_set_id) if is_live else 0
+                 with_cdn = ds.cdn_rail_id > 0 and "withCDN" in metadata
+
+                 enhanced.append(EnhancedDataSetInfo(
+                     pdp_rail_id=ds.pdp_rail_id,
+                     cache_miss_rail_id=ds.cache_miss_rail_id,
+                     cdn_rail_id=ds.cdn_rail_id,
+                     payer=ds.payer,
+                     payee=ds.payee,
+                     service_provider=ds.service_provider,
+                     commission_bps=ds.commission_bps,
+                     client_data_set_id=ds.client_data_set_id,
+                     pdp_end_epoch=ds.pdp_end_epoch,
+                     provider_id=ds.provider_id,
+                     data_set_id=ds.data_set_id,
+                     active_piece_count=active_piece_count,
+                     is_live=is_live,
+                     is_managed=is_managed,
+                     with_cdn=with_cdn,
+                     metadata=metadata,
+                 ))
+             except Exception as e:
+                 # Skip datasets that fail to load details
+                 continue
+
+         return enhanced
+
+
+ class AsyncWarmStorageService:
+     def __init__(self, web3: AsyncWeb3, chain: Chain, private_key: Optional[str] = None) -> None:
+         self._web3 = web3
+         self._chain = chain
+         self._private_key = private_key
+         self._fwss = web3.eth.contract(address=chain.contracts.warm_storage, abi=FWSS_ABI)
+         self._view = web3.eth.contract(address=chain.contracts.warm_storage_state_view, abi=FWSS_VIEW_ABI)
+
+     async def get_data_set(self, data_set_id: int) -> DataSetInfo:
+         info = await self._view.functions.getDataSet(data_set_id).call()
+         if int(info[0]) == 0:
+             raise ValueError(f"Data set {data_set_id} does not exist")
+         return DataSetInfo(
+             pdp_rail_id=int(info[0]),
+             cache_miss_rail_id=int(info[1]),
+             cdn_rail_id=int(info[2]),
+             payer=info[3],
+             payee=info[4],
+             service_provider=info[5],
+             commission_bps=int(info[6]),
+             client_data_set_id=int(info[7]),
+             pdp_end_epoch=int(info[8]),
+             provider_id=int(info[9]),
+             data_set_id=int(info[10]),
+         )
+
+     async def get_client_data_sets(self, client_address: str) -> List[DataSetInfo]:
+         data_sets = await self._view.functions.getClientDataSets(client_address).call()
+         return [await self.get_data_set(int(ds[10])) for ds in data_sets]
+
+     async def get_all_data_set_metadata(self, data_set_id: int) -> Dict[str, str]:
+         entries = await self._view.functions.getAllDataSetMetadata(data_set_id).call()
+         return {key: value for key, value in entries}
+
+     async def get_data_set_metadata(self, data_set_id: int, key: str) -> Optional[str]:
+         exists, value = await self._view.functions.getDataSetMetadata(data_set_id, key).call()
+         return value if exists else None
+
+     async def get_all_piece_metadata(self, data_set_id: int) -> List[Dict[str, str]]:
+         entries = await self._view.functions.getAllPieceMetadata(data_set_id).call()
+         return [dict(entry) for entry in entries]
+
+     async def get_piece_metadata(self, data_set_id: int, piece_id: int, key: str) -> Optional[str]:
+         exists, value = await self._view.functions.getPieceMetadata(data_set_id, piece_id, key).call()
+         return value if exists else None
+
+     async def get_data_set_status(self, data_set_id: int) -> int:
+         return int(await self._view.functions.getDataSetStatus(data_set_id).call())
+
+     async def get_data_set_size_in_bytes(self, leaf_count: int) -> int:
+         return int(await self._view.functions.getDataSetSizeInBytes(leaf_count).call())
+
+     async def get_pdp_config(self):
+         return await self._view.functions.getPDPConfig().call()
+
+     async def get_service_price(self, provider_id: int, token: str) -> int:
+         return int(await self._fwss.functions.getServicePrice(provider_id, token).call())
+
+     async def get_effective_rates(self):
+         return await self._fwss.functions.getEffectiveRates().call()
+
+     async def calculate_rate_per_epoch(self, total_bytes: int) -> int:
+         return int(await self._fwss.functions.calculateRatePerEpoch(total_bytes).call())
+
+     async def get_proving_period_for_epoch(self, data_set_id: int, epoch: int) -> int:
+         return int(await self._fwss.functions.getProvingPeriodForEpoch(data_set_id, epoch).call())
+
+     async def get_current_pricing_rates(self):
+         return await self._view.functions.getCurrentPricingRates().call()
+
+     async def next_pdp_challenge_window_start(self, data_set_id: int) -> int:
+         return int(await self._view.functions.nextPDPChallengeWindowStart(data_set_id).call())
+
+     async def proving_deadline(self, data_set_id: int) -> int:
+         return int(await self._view.functions.provingDeadline(data_set_id).call())
+
+     async def get_approved_providers(self, offset: int = 0, limit: int = 0) -> List[int]:
+         """
+         Get approved provider IDs with optional pagination.
+
+         Args:
+             offset: Starting index (0-based). Use 0 to start from beginning.
+             limit: Maximum number of providers to return. Use 0 to get all remaining providers.
+
+         Returns:
+             List of approved provider IDs.
+         """
+         providers = await self._view.functions.getApprovedProviders(offset, limit).call()
+         return [int(pid) for pid in providers]
+
+     async def get_approved_providers_length(self) -> int:
+         """Get the total count of approved providers."""
+         return int(await self._view.functions.getApprovedProvidersLength().call())
+
+     async def is_provider_approved(self, provider_id: int) -> bool:
+         """Check if a provider is approved for the warm storage service."""
+         return bool(await self._view.functions.isProviderApproved(provider_id).call())
+
+     async def add_approved_provider(self, account: str, provider_id: int) -> str:
+         if not self._private_key:
+             raise ValueError("private_key required")
+         txn = await self._fwss.functions.addApprovedProvider(provider_id).build_transaction(
+             {
+                 "from": account,
+                 "nonce": await self._web3.eth.get_transaction_count(account),
+             }
+         )
+         signed = Account.sign_transaction(txn, private_key=self._private_key)
+         tx_hash = await self._web3.eth.send_raw_transaction(signed.rawTransaction)
+         return tx_hash.hex()
+
+     async def remove_approved_provider(self, account: str, provider_id: int) -> str:
+         if not self._private_key:
+             raise ValueError("private_key required")
+         txn = await self._fwss.functions.removeApprovedProvider(provider_id).build_transaction(
+             {
+                 "from": account,
+                 "nonce": await self._web3.eth.get_transaction_count(account),
+             }
+         )
+         signed = Account.sign_transaction(txn, private_key=self._private_key)
+         tx_hash = await self._web3.eth.send_raw_transaction(signed.rawTransaction)
+         return tx_hash.hex()
+
+     async def get_approved_provider_ids(self) -> List[int]:
+         """Get list of all approved provider IDs for the warm storage service."""
+         # Use the view contract's getApprovedProviders with offset=0, limit=0 to get all
+         return await self.get_approved_providers(offset=0, limit=0)
+
+     async def get_active_piece_count(self, data_set_id: int) -> int:
+         """Get count of active pieces in a dataset (excludes removed pieces)."""
+         from pynapse.pdp import AsyncPDPVerifier
+         verifier = AsyncPDPVerifier(self._web3, self._chain)
+         return await verifier.get_active_piece_count(data_set_id)
+
+     async def data_set_live(self, data_set_id: int) -> bool:
+         """Check if a dataset is live."""
+         from pynapse.pdp import AsyncPDPVerifier
+         verifier = AsyncPDPVerifier(self._web3, self._chain)
+         return await verifier.data_set_live(data_set_id)
+
+     async def get_data_set_listener(self, data_set_id: int) -> str:
+         """Get the listener address for a dataset."""
+         from pynapse.pdp import AsyncPDPVerifier
+         verifier = AsyncPDPVerifier(self._web3, self._chain)
+         return await verifier.get_data_set_listener(data_set_id)
+
+     async def validate_data_set(self, data_set_id: int) -> None:
+         """
+         Validate that a dataset is live and managed by this WarmStorage contract.
+
+         Raises:
+             ValueError: If dataset is not live or not managed by this contract.
+         """
+         if not await self.data_set_live(data_set_id):
+             raise ValueError(f"Data set {data_set_id} does not exist or is not live")
+
+         listener = await self.get_data_set_listener(data_set_id)
+         if listener.lower() != self._chain.contracts.warm_storage.lower():
+             raise ValueError(
+                 f"Data set {data_set_id} is not managed by this WarmStorage contract "
+                 f"({self._chain.contracts.warm_storage}), managed by {listener}"
+             )
+
+     async def terminate_data_set(self, account: str, data_set_id: int) -> str:
+         """
+         Terminate a dataset. This also removes all pieces in the dataset.
+
+         Args:
+             account: The account address to send from
+             data_set_id: The ID of the dataset to terminate
+
+         Returns:
+             Transaction hash
+         """
+         if not self._private_key:
+             raise ValueError("private_key required")
+         txn = await self._fwss.functions.terminateDataSet(data_set_id).build_transaction(
+             {
+                 "from": account,
+                 "nonce": await self._web3.eth.get_transaction_count(account),
+             }
+         )
+         signed = Account.sign_transaction(txn, private_key=self._private_key)
+         tx_hash = await self._web3.eth.send_raw_transaction(signed.rawTransaction)
+         return tx_hash.hex()
+
+     async def get_client_data_sets_with_details(self, client_address: str) -> List[EnhancedDataSetInfo]:
+         """
+         Get all datasets for a client with enhanced details.
+
+         Includes live status, management info, metadata, and piece counts.
+
+         Args:
+             client_address: The client address to query
+
+         Returns:
+             List of enhanced dataset info
+         """
+         from pynapse.pdp import AsyncPDPVerifier
+         verifier = AsyncPDPVerifier(self._web3, self._chain)
+
+         data_sets = await self.get_client_data_sets(client_address)
+         enhanced: List[EnhancedDataSetInfo] = []
+
+         for ds in data_sets:
+             try:
+                 is_live = await verifier.data_set_live(ds.data_set_id)
+                 listener = await verifier.get_data_set_listener(ds.data_set_id) if is_live else ""
+                 is_managed = listener.lower() == self._chain.contracts.warm_storage.lower() if listener else False
+                 metadata = await self.get_all_data_set_metadata(ds.data_set_id) if is_live else {}
+                 active_piece_count = await verifier.get_active_piece_count(ds.data_set_id) if is_live else 0
+                 with_cdn = ds.cdn_rail_id > 0 and "withCDN" in metadata
+
+                 enhanced.append(EnhancedDataSetInfo(
+                     pdp_rail_id=ds.pdp_rail_id,
+                     cache_miss_rail_id=ds.cache_miss_rail_id,
+                     cdn_rail_id=ds.cdn_rail_id,
+                     payer=ds.payer,
+                     payee=ds.payee,
+                     service_provider=ds.service_provider,
+                     commission_bps=ds.commission_bps,
+                     client_data_set_id=ds.client_data_set_id,
+                     pdp_end_epoch=ds.pdp_end_epoch,
+                     provider_id=ds.provider_id,
+                     data_set_id=ds.data_set_id,
+                     active_piece_count=active_piece_count,
+                     is_live=is_live,
+                     is_managed=is_managed,
+                     with_cdn=with_cdn,
+                     metadata=metadata,
+                 ))
+             except Exception as e:
+                 # Skip datasets that fail to load details
+                 continue
+
+         return enhanced
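
For orientation, the following is a minimal, hypothetical sketch of how the synchronous service added above could be used for read-only queries. The RPC URL, client address, and dataset ID are placeholders, and construction of the `Chain` object from `pynapse.core.chains` is not shown in this diff.

```python
# Hypothetical usage sketch for SyncWarmStorageService; values are placeholders.
from web3 import Web3

from pynapse.warm_storage.service import SyncWarmStorageService

w3 = Web3(Web3.HTTPProvider("https://example-filecoin-rpc.invalid/rpc/v1"))
chain = ...  # a pynapse.core.chains.Chain for the target network (construction not shown in this diff)

service = SyncWarmStorageService(w3, chain)  # no private_key: read-only calls only

# offset=0, limit=0 returns all approved provider IDs (see the pagination docstring above).
provider_ids = service.get_approved_providers(offset=0, limit=0)

# Raises ValueError unless the dataset is live and managed by this WarmStorage contract.
service.validate_data_set(1)

# Enhanced per-dataset details for a client: liveness, piece count, metadata, CDN flag.
for ds in service.get_client_data_sets_with_details("0xYourClientAddress"):
    print(ds.data_set_id, ds.is_live, ds.active_piece_count, ds.with_cdn)
```

`AsyncWarmStorageService` mirrors the same API with `await`; the transaction-sending methods (`add_approved_provider`, `terminate_data_set`, and so on) additionally require the service to be constructed with a `private_key`.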
synapse_filecoin_sdk-0.1.0.dist-info/METADATA
@@ -0,0 +1,74 @@
+ Metadata-Version: 2.4
+ Name: synapse-filecoin-sdk
+ Version: 0.1.0
+ Summary: Python SDK for Filecoin Onchain Cloud (Synapse)
+ Project-URL: Homepage, https://github.com/FilOzone/synapse-sdk
+ Project-URL: Repository, https://github.com/FilOzone/synapse-sdk
+ Author: FilOz / Data Preservation Programs
+ License: Apache-2.0 OR MIT
+ License-File: LICENSE.md
+ Keywords: filecoin,pdp,storage,synapse,web3
+ Classifier: License :: OSI Approved :: Apache Software License
+ Classifier: License :: OSI Approved :: MIT License
+ Classifier: Programming Language :: Python :: 3
+ Classifier: Programming Language :: Python :: 3.11
+ Classifier: Programming Language :: Python :: 3.12
+ Classifier: Programming Language :: Python :: 3.13
+ Requires-Python: >=3.11
+ Requires-Dist: httpx<0.28.0,>=0.25.0
+ Requires-Dist: multiformats<0.4.0,>=0.3.1
+ Requires-Dist: web3<7,>=6.0.0
+ Provides-Extra: test
+ Requires-Dist: pytest-asyncio<0.24,>=0.23.0; extra == 'test'
+ Requires-Dist: pytest<9,>=8.0.0; extra == 'test'
+ Description-Content-Type: text/markdown
+
+ # Pynapse
+
+ Python SDK for Filecoin Onchain Cloud (Synapse).
+
+ This project mirrors the JS SDK in `FilOzone/synapse-sdk` and references the Go implementation in `data-preservation-programs/go-synapse` for parity.
+
+ ## Status
+
+ Work in progress. Parity is being implemented in incremental commits.
+
+ ## Install (dev)
+
+ ```bash
+ uv venv
+ uv pip install -e .[test]
+ ```
+
+ PyPI package name: `synapse-filecoin-sdk`
+ Python import: `pynapse`
+
+ ## Install (PyPI)
+
+ ```bash
+ pip install synapse-filecoin-sdk
+ ```
+
+ ## CommP / PieceCID
+
+ `pynapse` uses `stream-commp` from `go-fil-commp-hashhash` for PieceCID calculation.
+ Set `PYNAPSE_COMMP_HELPER` to override the helper path.
+
+ ## License
+
+ Dual-licensed under Apache-2.0 OR MIT. See `LICENSE.md`.
+
+ ## Publishing to PyPI
+
+ Publishing is automated via GitHub Actions in `.github/workflows/publish-pypi.yml`.
+
+ 1. In PyPI, create the project (or use an existing one) and configure a Trusted Publisher for this GitHub repository and workflow.
+ 2. In GitHub, optionally protect the `pypi` environment for manual approval.
+ 3. Tag a release and push the tag:
+
+ ```bash
+ git tag v0.1.1
+ git push origin v0.1.1
+ ```
+
+ The workflow builds the package, runs `twine check`, and publishes to PyPI via OIDC (no API token required).
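
As a concrete illustration of the CommP / PieceCID note in the README above, overriding the helper path could look like the following; the binary location is a placeholder.

```bash
# Point pynapse at a locally built stream-commp helper (path is illustrative).
export PYNAPSE_COMMP_HELPER=/usr/local/bin/stream-commp
```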
synapse_filecoin_sdk-0.1.0.dist-info/RECORD
@@ -0,0 +1,64 @@
+ pynapse/__init__.py,sha256=EcNT2_edj2ICiSNlOjdiRTOsqSn4pK5-W0MCfMEHfEA,187
+ pynapse/_version.py,sha256=kUR5RAFc7HCeiqdlX36dZOHkUI5wI6V_43RpEcD8b-0,22
+ pynapse/synapse.py,sha256=crjmmtfFfQzRg4iZqwAOe9JOF-ddNi0PV432XQTyNNk,6582
+ pynapse/contracts/__init__.py,sha256=8nz_zyobw86VKfzw7sZaoHrvf_YD8WrDuh89UdeEHLQ,757
+ pynapse/contracts/abi_registry.py,sha256=PJQalwqabJWWdC65SUJUxIGz1f5zBxHbdyTQD3azckU,219
+ pynapse/contracts/addresses.json,sha256=sL2KUtaRJ9xcVB-5Ex_48gv42Wl0kEZNxy8MaqpisKE,1093
+ pynapse/contracts/erc20_abi.json,sha256=79NoePu425E01aE3igheECCSe19kEaYosIUNjRbTvYc,2039
+ pynapse/contracts/errorsAbi.json,sha256=M-Go_-GZUJztT28a0KuNca3P5syfLsbT-UTGEO1Ybn8,17758
+ pynapse/contracts/filecoinPayV1Abi.json,sha256=LaWdfg2eA_y7AnDINxx801e4AuRLCERoBJplno6Noys,49492
+ pynapse/contracts/filecoinWarmStorageServiceAbi.json,sha256=EAO6i-oCG6Z1rZ6q0qWWcde2x7PvcRqg8tXCihM6xWk,46817
+ pynapse/contracts/filecoinWarmStorageServiceStateViewAbi.json,sha256=LSZ6s0RZCmfDvhiz9Ikq3ki39YaiPnylCw2BAulypvU,13120
+ pynapse/contracts/generated.py,sha256=10Hk0bNx_i8i1rwJcZvJxQ4GVs4W3UInz9cJ_bMbCJM,944
+ pynapse/contracts/payments_abi.json,sha256=p_rXhvzSXylbJaOqCegx7i5mdp6G4kQOBn4OraJt3-M,5467
+ pynapse/contracts/pdpVerifierAbi.json,sha256=cOQ1Y4YV1k7hmtJ3cBZEQxsOR0C095ovlNKlimuDlKU,24053
+ pynapse/contracts/providerIdSetAbi.json,sha256=5Ocs6YPs2tUAfGQBkaSQVAHBMQybKXqGJA4mK-hBzpE,2929
+ pynapse/contracts/serviceProviderRegistryAbi.json,sha256=p7MKmdbM8-RfTBDbxJ-z0KDf4VrNhmQgPu30w5SNM7g,32230
+ pynapse/contracts/sessionKeyRegistryAbi.json,sha256=IeS78vnmw6jgMsfBQAh31J8_d6-zyNsTF55RIZEpGdc,2898
+ pynapse/core/__init__.py,sha256=YuqvIEwzUJjlLOsv4HwzRu6iGJyldmOyU-7AdqB14JI,1560
+ pynapse/core/abis.py,sha256=Jihn4VrMoE_FoYeihnoAkXJHgGZ6ovGEXQCtzVBHDmg,505
+ pynapse/core/chains.py,sha256=ZCo6xY6m-yQIUmew6V8tyy3vtC0P12IUFx0u3mcWmSI,3105
+ pynapse/core/constants.py,sha256=KlkFVSSHMol859n9MWsj2d5u_kIa72Ld_dxTAQucubw,597
+ pynapse/core/errors.py,sha256=XtRdIqvvBEeh4G2bFaO2blNS37BOvBiCYR4BmkeueQ0,661
+ pynapse/core/piece.py,sha256=s_NpLQ70rgQ_CkNUrJpqv0OTxG6i-xiGp7x5VVCb4Gk,8836
+ pynapse/core/rand.py,sha256=t1mqp_R3bYhGP8WaCqWdAwF0y_usATkVzOp6FsYb61w,280
+ pynapse/core/typed_data.py,sha256=iXcsPhAxSZtrOKA2DRUX60gGMc_a5FaS6pAEJIM8ZTM,10271
+ pynapse/core/utils.py,sha256=OoCRo-4aOuxEy8FsVQUnutYayCVDT0HXwlMWlKaCgNw,811
+ pynapse/evm/__init__.py,sha256=4lPmHfNArjp-QfYg-SpL6NzkoQ8JeSM3LsvjEMqgRkI,97
+ pynapse/evm/client.py,sha256=OZY3MLq49KjjxTx3w25xgwoX9fWyvLfq1amFsHfzoLQ,608
+ pynapse/filbeam/__init__.py,sha256=aY5abqgV5mJikOkn0IDNyjB8y9PsHwU8zWkUKL5dnGg,96
+ pynapse/filbeam/service.py,sha256=2QcAakeDxiaNahAwX4O6KiEuUP3abXq_qyuPwsXaFQM,1311
+ pynapse/payments/__init__.py,sha256=Hpwyec7OK8C1OVwrNgmxuH71pPzUBRsgwWXgZzKCt5c,302
+ pynapse/payments/service.py,sha256=DFLhMgeuM7AWgZSe5dWtGEqa1amYDujbFr366BmfAkI,32121
+ pynapse/pdp/__init__.py,sha256=cfS8u4nGQ7dKHZxy3fUCHSnwUTjkejNya9iGnOHCt9Q,490
+ pynapse/pdp/server.py,sha256=K2cvLpW373wJcoTm_oTHSpNk_fxe_cZBL7u5UEuuFAw,14982
+ pynapse/pdp/types.py,sha256=QyQgImpUKqAdjflWXxYrskadl-VIYtw3EupseVQmH3k,667
+ pynapse/pdp/verifier.py,sha256=8XMrdrqDd4i1GOQgQpUYVWGPHM4VDuNHwjijAxaSKT0,3785
+ pynapse/retriever/__init__.py,sha256=fwwH911Jw-T0evX_hxvrxlE861N5vHjtvzehIUWEiD4,401
+ pynapse/retriever/async_chain.py,sha256=WXUq60mFXoBOjUH8WzkgtXDgWMWy-wslLhumPSXHTK8,8040
+ pynapse/retriever/chain.py,sha256=hWcEo8FkG_O_1Tb9NW-O6ou2TLuoOiqHm6NQlsoK2ss,7105
+ pynapse/session/__init__.py,sha256=qZ6rLr2pcQYoeBUAJVvAPxRjWZaebOkSGtkd4zA22oU,380
+ pynapse/session/key.py,sha256=OHyvVfqwlUEAVv1mBA7keLl4NmrHePMf_TTOXlApFFA,1343
+ pynapse/session/permissions.py,sha256=8kafim73IAIdk-ogr44sVLJjFtFRaZqD691chHaA8_8,1728
+ pynapse/session/registry.py,sha256=QyRZtUyIKlVqktf2_2yNOgGKHU5NeSa8oZutgrkiz_M,4457
+ pynapse/sp_registry/__init__.py,sha256=x9GuCB3sqyJDOYk0hJi1_R3j8lDG_GQSKoptzcr5i8M,331
+ pynapse/sp_registry/capabilities.py,sha256=L7Q6oNVcrPnIP8DdoO-FjS28_blUU1sD6OxUohugGRQ,748
+ pynapse/sp_registry/pdp_capabilities.py,sha256=f1c1GPr7GnVwX1EWiVwb6ezHe4tU5qV-4RB9StzDaRo,3920
+ pynapse/sp_registry/service.py,sha256=fk3gyJBLcuGSvYqjJcg1Xw9PXhNvAKsvpWBtgQ0-Ukc,18908
+ pynapse/sp_registry/types.py,sha256=zj0vp2t5ICrBx0AVF1f4sFbief-B8dDpFY_w07LrDHs,1048
+ pynapse/storage/__init__.py,sha256=UFWVYsNRSt1HDr8p6bOWZPvzXyaxgCxAlnyK9Cq0v34,1244
+ pynapse/storage/async_context.py,sha256=InrWkSm8dCoio3LCx8K1ZaQg1ecWmLhOz86gwHINuEA,26128
+ pynapse/storage/async_manager.py,sha256=DdNLmmOVHum5Wac6okUiIexFpmUWixcAySOPIDpVWTg,28963
+ pynapse/storage/context.py,sha256=RKN9D0ty_buIJvU8NGgvXOZf5-EHMyP7aeMCF-8R7Fg,25257
+ pynapse/storage/manager.py,sha256=2HtzgKSovpUM6eb9K1hQPCFKit36WXOv6xd_rTHdh6I,28424
+ pynapse/utils/__init__.py,sha256=uheqRLPWtbJodtEjeVi9bpKmdLv0CWpJwbm4v5apXXU,660
+ pynapse/utils/constants.py,sha256=HgncDwakYKwXXSphQCEwDeqUHsRu-83HYIkVUv1cvfI,735
+ pynapse/utils/errors.py,sha256=cVI_G2ru89DEd6lSB-udF9Bbi-J6KhxcH3mjACkvFRE,103
+ pynapse/utils/metadata.py,sha256=-tEl9PAgPdBUhFqze0-ZRzn6_jIbiHTLDnlD3QM0FQ0,1139
+ pynapse/utils/piece_url.py,sha256=md27_fxtcCyBvREBLNBJquJMj1V1gSdobfHe9x3RmhU,513
+ pynapse/warm_storage/__init__.py,sha256=WdUamV4veb7ZHyRWU12tQj8V5IVblbrgpNAyv5WSUE8,246
+ pynapse/warm_storage/service.py,sha256=eRZEDkN0257cVSs665cRQYqZUlL-BDAxfAFD6GY344E,22770
+ synapse_filecoin_sdk-0.1.0.dist-info/METADATA,sha256=wDfhAgPxZ8Lfn61jGF3veG9A_YkCnyVzBAfrwiZo_bw,2247
+ synapse_filecoin_sdk-0.1.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+ synapse_filecoin_sdk-0.1.0.dist-info/licenses/LICENSE.md,sha256=AkqszG9dHKiuURbWTvtFVRX_X17biR28Tihp8Tt8xO0,12466
+ synapse_filecoin_sdk-0.1.0.dist-info/RECORD,,
synapse_filecoin_sdk-0.1.0.dist-info/WHEEL
@@ -0,0 +1,4 @@
+ Wheel-Version: 1.0
+ Generator: hatchling 1.28.0
+ Root-Is-Purelib: true
+ Tag: py3-none-any