synapse-filecoin-sdk 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pynapse/__init__.py +6 -0
- pynapse/_version.py +1 -0
- pynapse/contracts/__init__.py +34 -0
- pynapse/contracts/abi_registry.py +11 -0
- pynapse/contracts/addresses.json +30 -0
- pynapse/contracts/erc20_abi.json +92 -0
- pynapse/contracts/errorsAbi.json +933 -0
- pynapse/contracts/filecoinPayV1Abi.json +2424 -0
- pynapse/contracts/filecoinWarmStorageServiceAbi.json +2363 -0
- pynapse/contracts/filecoinWarmStorageServiceStateViewAbi.json +651 -0
- pynapse/contracts/generated.py +35 -0
- pynapse/contracts/payments_abi.json +205 -0
- pynapse/contracts/pdpVerifierAbi.json +1266 -0
- pynapse/contracts/providerIdSetAbi.json +161 -0
- pynapse/contracts/serviceProviderRegistryAbi.json +1479 -0
- pynapse/contracts/sessionKeyRegistryAbi.json +147 -0
- pynapse/core/__init__.py +68 -0
- pynapse/core/abis.py +25 -0
- pynapse/core/chains.py +97 -0
- pynapse/core/constants.py +27 -0
- pynapse/core/errors.py +22 -0
- pynapse/core/piece.py +263 -0
- pynapse/core/rand.py +14 -0
- pynapse/core/typed_data.py +320 -0
- pynapse/core/utils.py +30 -0
- pynapse/evm/__init__.py +3 -0
- pynapse/evm/client.py +26 -0
- pynapse/filbeam/__init__.py +3 -0
- pynapse/filbeam/service.py +39 -0
- pynapse/payments/__init__.py +17 -0
- pynapse/payments/service.py +826 -0
- pynapse/pdp/__init__.py +21 -0
- pynapse/pdp/server.py +331 -0
- pynapse/pdp/types.py +38 -0
- pynapse/pdp/verifier.py +82 -0
- pynapse/retriever/__init__.py +12 -0
- pynapse/retriever/async_chain.py +227 -0
- pynapse/retriever/chain.py +209 -0
- pynapse/session/__init__.py +12 -0
- pynapse/session/key.py +30 -0
- pynapse/session/permissions.py +57 -0
- pynapse/session/registry.py +90 -0
- pynapse/sp_registry/__init__.py +11 -0
- pynapse/sp_registry/capabilities.py +25 -0
- pynapse/sp_registry/pdp_capabilities.py +102 -0
- pynapse/sp_registry/service.py +446 -0
- pynapse/sp_registry/types.py +52 -0
- pynapse/storage/__init__.py +57 -0
- pynapse/storage/async_context.py +682 -0
- pynapse/storage/async_manager.py +757 -0
- pynapse/storage/context.py +680 -0
- pynapse/storage/manager.py +758 -0
- pynapse/synapse.py +191 -0
- pynapse/utils/__init__.py +25 -0
- pynapse/utils/constants.py +25 -0
- pynapse/utils/errors.py +3 -0
- pynapse/utils/metadata.py +35 -0
- pynapse/utils/piece_url.py +16 -0
- pynapse/warm_storage/__init__.py +13 -0
- pynapse/warm_storage/service.py +513 -0
- synapse_filecoin_sdk-0.1.0.dist-info/METADATA +74 -0
- synapse_filecoin_sdk-0.1.0.dist-info/RECORD +64 -0
- synapse_filecoin_sdk-0.1.0.dist-info/WHEEL +4 -0
- synapse_filecoin_sdk-0.1.0.dist-info/licenses/LICENSE.md +228 -0
pynapse/pdp/__init__.py
ADDED
@@ -0,0 +1,21 @@
from .server import AsyncPDPServer, PDPServer
from .verifier import AsyncPDPVerifier, SyncPDPVerifier
from .types import (
    AddPiecesResponse,
    CreateDataSetResponse,
    DataSetCreationStatus,
    PieceAdditionStatus,
    UploadPieceResponse,
)

__all__ = [
    "PDPServer",
    "AsyncPDPServer",
    "AddPiecesResponse",
    "CreateDataSetResponse",
    "DataSetCreationStatus",
    "PieceAdditionStatus",
    "UploadPieceResponse",
    "SyncPDPVerifier",
    "AsyncPDPVerifier",
]
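With these re-exports, callers can pull the whole PDP surface from pynapse.pdp instead of reaching into the individual submodules:

from pynapse.pdp import AsyncPDPServer, PDPServer, SyncPDPVerifier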
pynapse/pdp/server.py
ADDED
@@ -0,0 +1,331 @@
from __future__ import annotations

import asyncio
import re
import time
from typing import Iterable

import httpx

from .types import (
    AddPiecesResponse,
    CreateDataSetResponse,
    DataSetCreationStatus,
    PieceAdditionStatus,
    UploadPieceResponse,
)


class PDPServer:
    """Synchronous HTTP client for a PDP (Proof of Data Possession) server."""

    def __init__(self, endpoint: str, timeout_seconds: int = 300) -> None:
        self._endpoint = endpoint.rstrip("/")
        self._client = httpx.Client(timeout=timeout_seconds)
        # Piece uploads can be arbitrarily large, so the upload client never times out.
        self._upload_client = httpx.Client(timeout=None)

    @property
    def endpoint(self) -> str:
        return self._endpoint

    def create_data_set(self, record_keeper: str, extra_data: str) -> CreateDataSetResponse:
        resp = self._client.post(
            f"{self._endpoint}/pdp/data-sets",
            json={"recordKeeper": record_keeper, "extraData": extra_data},
        )
        if resp.status_code not in (201, 202):
            raise RuntimeError(f"unexpected status {resp.status_code}: {resp.text}")
        location = resp.headers.get("Location")
        if not location:
            raise RuntimeError("missing Location header")
        tx_hash = location.split("/")[-1]
        if not tx_hash.startswith("0x"):
            raise RuntimeError(f"invalid txHash in Location header: {tx_hash}")
        return CreateDataSetResponse(tx_hash=tx_hash, status_url=f"{self._endpoint}{location}")

    def get_data_set_creation_status(self, tx_hash: str) -> DataSetCreationStatus:
        resp = self._client.get(f"{self._endpoint}/pdp/data-sets/created/{tx_hash}")
        if resp.status_code == 404:
            raise RuntimeError(f"data set creation not found for txHash: {tx_hash}")
        if resp.status_code != 200:
            raise RuntimeError(f"unexpected status {resp.status_code}: {resp.text}")
        payload = resp.json()
        return DataSetCreationStatus(
            data_set_created=payload.get("dataSetCreated", False),
            data_set_id=payload.get("dataSetId"),
            message=payload.get("message"),
        )

    def wait_for_data_set_creation(self, tx_hash: str, timeout_seconds: int = 300, poll_interval: int = 4) -> DataSetCreationStatus:
        deadline = time.time() + timeout_seconds
        while time.time() < deadline:
            status = self.get_data_set_creation_status(tx_hash)
            if status.data_set_created:
                return status
            time.sleep(poll_interval)
        raise TimeoutError("Timed out waiting for data set creation")

    def add_pieces(self, data_set_id: int, piece_cids: Iterable[str], extra_data: str) -> AddPiecesResponse:
        pieces = [
            {
                "pieceCid": cid,
                "subPieces": [{"subPieceCid": cid}],
            }
            for cid in piece_cids
        ]
        resp = self._client.post(
            f"{self._endpoint}/pdp/data-sets/{data_set_id}/pieces",
            json={"pieces": pieces, "extraData": extra_data},
        )
        if resp.status_code not in (201, 202):
            raise RuntimeError(f"unexpected status {resp.status_code}: {resp.text}")
        location = resp.headers.get("Location")
        if not location:
            raise RuntimeError("missing Location header")
        tx_hash = location.split("/")[-1]
        return AddPiecesResponse(
            message=f"Pieces added to data set ID {data_set_id}",
            tx_hash=tx_hash,
            status_url=f"{self._endpoint}{location}",
        )

    def get_piece_addition_status(self, data_set_id: int, tx_hash: str) -> PieceAdditionStatus:
        resp = self._client.get(f"{self._endpoint}/pdp/data-sets/{data_set_id}/pieces/added/{tx_hash}")
        if resp.status_code == 404:
            raise RuntimeError(f"piece addition not found for txHash: {tx_hash}")
        if resp.status_code != 200:
            raise RuntimeError(f"unexpected status {resp.status_code}: {resp.text}")
        payload = resp.json()
        return PieceAdditionStatus(
            add_message_ok=payload.get("addMessageOk"),
            piece_count=payload.get("pieceCount"),
            confirmed_piece_ids=payload.get("confirmedPieceIds"),
            message=payload.get("message"),
        )

    def wait_for_piece_addition(self, data_set_id: int, tx_hash: str, timeout_seconds: int = 300, poll_interval: int = 1) -> PieceAdditionStatus:
        deadline = time.time() + timeout_seconds
        while time.time() < deadline:
            status = self.get_piece_addition_status(data_set_id, tx_hash)
            if status.add_message_ok:
                return status
            time.sleep(poll_interval)
        raise TimeoutError("Timed out waiting for piece addition")

    def upload_piece(self, data: bytes, piece_cid: str, padded_piece_size: int = 0) -> UploadPieceResponse:
        create_resp = self._client.post(f"{self._endpoint}/pdp/piece/uploads")
        if create_resp.status_code != 201:
            raise RuntimeError(f"failed to create upload session: {create_resp.text}")
        location = create_resp.headers.get("Location")
        if not location:
            raise RuntimeError("missing Location header in upload session response")
        match = re.search(r"/pdp/piece/uploads/([a-fA-F0-9-]+)", location)
        if not match:
            raise RuntimeError(f"invalid Location header format: {location}")
        upload_uuid = match.group(1)

        upload_resp = self._upload_client.put(
            f"{self._endpoint}/pdp/piece/uploads/{upload_uuid}",
            content=data,
            headers={"Content-Type": "application/octet-stream"},
        )
        if upload_resp.status_code != 204:
            raise RuntimeError(f"upload failed: {upload_resp.text}")

        # PieceCIDv1 requires size to be provided (padded piece size)
        finalize_body = {"pieceCid": piece_cid}
        if padded_piece_size > 0:
            finalize_body["size"] = padded_piece_size

        finalize_resp = self._client.post(
            f"{self._endpoint}/pdp/piece/uploads/{upload_uuid}",
            json=finalize_body,
        )
        if finalize_resp.status_code != 200:
            raise RuntimeError(f"finalize failed: {finalize_resp.text}")

        return UploadPieceResponse(piece_cid=piece_cid, size=len(data))

    def find_piece(self, piece_cid: str) -> None:
        resp = self._client.get(f"{self._endpoint}/pdp/piece", params={"pieceCid": piece_cid})
        if resp.status_code == 404:
            raise RuntimeError(f"piece not found: {piece_cid}")
        if resp.status_code != 200:
            raise RuntimeError(f"unexpected status {resp.status_code}: {resp.text}")

    def wait_for_piece(self, piece_cid: str, timeout_seconds: int = 300, poll_interval: int = 5) -> None:
        deadline = time.time() + timeout_seconds
        while time.time() < deadline:
            try:
                self.find_piece(piece_cid)
                return
            except RuntimeError as exc:
                # A "not found" error means the server has not indexed the piece yet; keep polling.
                if "not found" in str(exc):
                    time.sleep(poll_interval)
                    continue
                raise
        raise TimeoutError("Timed out waiting for piece to be available")

    def download_piece(self, piece_cid: str) -> bytes:
        resp = self._client.get(f"{self._endpoint}/pdp/piece/{piece_cid}")
        if resp.status_code == 404:
            raise RuntimeError(f"piece not found: {piece_cid}")
        if resp.status_code != 200:
            raise RuntimeError(f"unexpected status {resp.status_code}: {resp.text}")
        return resp.content


class AsyncPDPServer:
    """Asynchronous counterpart to PDPServer; same endpoints and semantics."""

    def __init__(self, endpoint: str, timeout_seconds: int = 300) -> None:
        self._endpoint = endpoint.rstrip("/")
        self._client = httpx.AsyncClient(timeout=timeout_seconds)
        # Piece uploads can be arbitrarily large, so the upload client never times out.
        self._upload_client = httpx.AsyncClient(timeout=None)

    @property
    def endpoint(self) -> str:
        return self._endpoint

    async def create_data_set(self, record_keeper: str, extra_data: str) -> CreateDataSetResponse:
        resp = await self._client.post(
            f"{self._endpoint}/pdp/data-sets",
            json={"recordKeeper": record_keeper, "extraData": extra_data},
        )
        if resp.status_code not in (201, 202):
            raise RuntimeError(f"unexpected status {resp.status_code}: {resp.text}")
        location = resp.headers.get("Location")
        if not location:
            raise RuntimeError("missing Location header")
        tx_hash = location.split("/")[-1]
        if not tx_hash.startswith("0x"):
            raise RuntimeError(f"invalid txHash in Location header: {tx_hash}")
        return CreateDataSetResponse(tx_hash=tx_hash, status_url=f"{self._endpoint}{location}")

    async def get_data_set_creation_status(self, tx_hash: str) -> DataSetCreationStatus:
        resp = await self._client.get(f"{self._endpoint}/pdp/data-sets/created/{tx_hash}")
        if resp.status_code == 404:
            raise RuntimeError(f"data set creation not found for txHash: {tx_hash}")
        if resp.status_code != 200:
            raise RuntimeError(f"unexpected status {resp.status_code}: {resp.text}")
        payload = resp.json()
        return DataSetCreationStatus(
            data_set_created=payload.get("dataSetCreated", False),
            data_set_id=payload.get("dataSetId"),
            message=payload.get("message"),
        )

    async def wait_for_data_set_creation(self, tx_hash: str, timeout_seconds: int = 300, poll_interval: int = 4) -> DataSetCreationStatus:
        deadline = time.time() + timeout_seconds
        while time.time() < deadline:
            status = await self.get_data_set_creation_status(tx_hash)
            if status.data_set_created:
                return status
            await asyncio.sleep(poll_interval)
        raise TimeoutError("Timed out waiting for data set creation")

    async def add_pieces(self, data_set_id: int, piece_cids: Iterable[str], extra_data: str) -> AddPiecesResponse:
        pieces = [
            {
                "pieceCid": cid,
                "subPieces": [{"subPieceCid": cid}],
            }
            for cid in piece_cids
        ]
        resp = await self._client.post(
            f"{self._endpoint}/pdp/data-sets/{data_set_id}/pieces",
            json={"pieces": pieces, "extraData": extra_data},
        )
        if resp.status_code not in (201, 202):
            raise RuntimeError(f"unexpected status {resp.status_code}: {resp.text}")
        location = resp.headers.get("Location")
        if not location:
            raise RuntimeError("missing Location header")
        tx_hash = location.split("/")[-1]
        return AddPiecesResponse(
            message=f"Pieces added to data set ID {data_set_id}",
            tx_hash=tx_hash,
            status_url=f"{self._endpoint}{location}",
        )

    async def get_piece_addition_status(self, data_set_id: int, tx_hash: str) -> PieceAdditionStatus:
        resp = await self._client.get(f"{self._endpoint}/pdp/data-sets/{data_set_id}/pieces/added/{tx_hash}")
        if resp.status_code == 404:
            raise RuntimeError(f"piece addition not found for txHash: {tx_hash}")
        if resp.status_code != 200:
            raise RuntimeError(f"unexpected status {resp.status_code}: {resp.text}")
        payload = resp.json()
        return PieceAdditionStatus(
            add_message_ok=payload.get("addMessageOk"),
            piece_count=payload.get("pieceCount"),
            confirmed_piece_ids=payload.get("confirmedPieceIds"),
            message=payload.get("message"),
        )

    async def wait_for_piece_addition(self, data_set_id: int, tx_hash: str, timeout_seconds: int = 300, poll_interval: int = 1) -> PieceAdditionStatus:
        deadline = time.time() + timeout_seconds
        while time.time() < deadline:
            status = await self.get_piece_addition_status(data_set_id, tx_hash)
            if status.add_message_ok:
                return status
            await asyncio.sleep(poll_interval)
        raise TimeoutError("Timed out waiting for piece addition")

    async def upload_piece(self, data: bytes, piece_cid: str, padded_piece_size: int = 0) -> UploadPieceResponse:
        create_resp = await self._client.post(f"{self._endpoint}/pdp/piece/uploads")
        if create_resp.status_code != 201:
            raise RuntimeError(f"failed to create upload session: {create_resp.text}")
        location = create_resp.headers.get("Location")
        if not location:
            raise RuntimeError("missing Location header in upload session response")
        match = re.search(r"/pdp/piece/uploads/([a-fA-F0-9-]+)", location)
        if not match:
            raise RuntimeError(f"invalid Location header format: {location}")
        upload_uuid = match.group(1)

        upload_resp = await self._upload_client.put(
            f"{self._endpoint}/pdp/piece/uploads/{upload_uuid}",
            content=data,
            headers={"Content-Type": "application/octet-stream"},
        )
        if upload_resp.status_code != 204:
            raise RuntimeError(f"upload failed: {upload_resp.text}")

        # PieceCIDv1 requires size to be provided (padded piece size)
        finalize_body = {"pieceCid": piece_cid}
        if padded_piece_size > 0:
            finalize_body["size"] = padded_piece_size

        finalize_resp = await self._client.post(
            f"{self._endpoint}/pdp/piece/uploads/{upload_uuid}",
            json=finalize_body,
        )
        if finalize_resp.status_code != 200:
            raise RuntimeError(f"finalize failed: {finalize_resp.text}")

        return UploadPieceResponse(piece_cid=piece_cid, size=len(data))

    async def find_piece(self, piece_cid: str) -> None:
        resp = await self._client.get(f"{self._endpoint}/pdp/piece", params={"pieceCid": piece_cid})
        if resp.status_code == 404:
            raise RuntimeError(f"piece not found: {piece_cid}")
        if resp.status_code != 200:
            raise RuntimeError(f"unexpected status {resp.status_code}: {resp.text}")

    async def wait_for_piece(self, piece_cid: str, timeout_seconds: int = 300, poll_interval: int = 5) -> None:
        deadline = time.time() + timeout_seconds
        while time.time() < deadline:
            try:
                await self.find_piece(piece_cid)
                return
            except RuntimeError as exc:
                # A "not found" error means the server has not indexed the piece yet; keep polling.
                if "not found" in str(exc):
                    await asyncio.sleep(poll_interval)
                    continue
                raise
        raise TimeoutError("Timed out waiting for piece to be available")

    async def download_piece(self, piece_cid: str) -> bytes:
        resp = await self._client.get(f"{self._endpoint}/pdp/piece/{piece_cid}")
        if resp.status_code == 404:
            raise RuntimeError(f"piece not found: {piece_cid}")
        if resp.status_code != 200:
            raise RuntimeError(f"unexpected status {resp.status_code}: {resp.text}")
        return resp.content
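Taken together, these endpoints form a two-phase flow: the raw bytes are uploaded to the PDP server over HTTP, and the piece is then registered on-chain against a data set, with the wait_* helpers bridging transaction latency. A minimal sketch of that flow with the sync client; the endpoint, record-keeper address, extraData payloads, and piece CID below are all placeholders:

from pynapse.pdp import PDPServer

server = PDPServer("https://pdp.example.com")  # placeholder endpoint

# 1. Create a data set and wait for its creation transaction to land.
created = server.create_data_set(record_keeper="0x0000000000000000000000000000000000000000", extra_data="0x")
status = server.wait_for_data_set_creation(created.tx_hash)

# 2. Upload the piece bytes, then wait until the server has indexed the piece.
data = b"example payload"
piece_cid = "bafkzcib-placeholder"  # placeholder: compute the real piece CID for `data`
server.upload_piece(data, piece_cid)
server.wait_for_piece(piece_cid)

# 3. Register the piece against the data set and wait for confirmation.
added = server.add_pieces(status.data_set_id, [piece_cid], extra_data="0x")
server.wait_for_piece_addition(status.data_set_id, added.tx_hash)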
pynapse/pdp/types.py
ADDED
@@ -0,0 +1,38 @@
from __future__ import annotations

from dataclasses import dataclass
from typing import List, Optional


@dataclass
class CreateDataSetResponse:
    tx_hash: str
    status_url: str


@dataclass
class AddPiecesResponse:
    message: str
    tx_hash: str
    status_url: str


@dataclass
class UploadPieceResponse:
    piece_cid: str
    size: int


@dataclass
class DataSetCreationStatus:
    data_set_created: bool
    data_set_id: Optional[int]
    message: Optional[str] = None


@dataclass
class PieceAdditionStatus:
    add_message_ok: Optional[bool]
    piece_count: Optional[int]
    confirmed_piece_ids: Optional[List[int]]
    message: Optional[str] = None
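The Optional fields track the asynchronous lifecycle of on-chain transactions: the status endpoints can be queried before a transaction lands, and the server omits anything it does not know yet, which is what the polling helpers in server.py key off. An illustration with invented values:

# While the add-pieces transaction is pending, nothing is confirmed yet:
pending = PieceAdditionStatus(add_message_ok=None, piece_count=None, confirmed_piece_ids=None)

# Once it lands, the server reports the outcome and the assigned piece IDs:
confirmed = PieceAdditionStatus(
    add_message_ok=True,
    piece_count=2,
    confirmed_piece_ids=[0, 1],
    message="pieces confirmed",
)
assert pending.add_message_ok is None and confirmed.add_message_ok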
pynapse/pdp/verifier.py
ADDED
@@ -0,0 +1,82 @@
from __future__ import annotations

from web3 import AsyncWeb3, Web3

from pynapse.contracts import PDP_VERIFIER_ABI
from pynapse.core.chains import Chain


class SyncPDPVerifier:
    """Read-only view over the on-chain PDPVerifier contract (sync)."""

    def __init__(self, web3: Web3, chain: Chain) -> None:
        self._web3 = web3
        self._chain = chain
        self._contract = web3.eth.contract(address=chain.contracts.pdp_verifier, abi=PDP_VERIFIER_ABI)

    def data_set_live(self, data_set_id: int) -> bool:
        return bool(self._contract.functions.dataSetLive(data_set_id).call())

    def get_active_piece_count(self, data_set_id: int) -> int:
        return int(self._contract.functions.getActivePieceCount(data_set_id).call())

    def get_active_pieces(self, data_set_id: int, offset: int, limit: int):
        return self._contract.functions.getActivePieces(data_set_id, offset, limit).call()

    def get_data_set_leaf_count(self, data_set_id: int) -> int:
        return int(self._contract.functions.getDataSetLeafCount(data_set_id).call())

    def get_data_set_listener(self, data_set_id: int) -> str:
        return self._contract.functions.getDataSetListener(data_set_id).call()

    def get_data_set_storage_provider(self, data_set_id: int) -> str:
        return self._contract.functions.getDataSetStorageProvider(data_set_id).call()

    def get_next_piece_id(self, data_set_id: int) -> int:
        return int(self._contract.functions.getNextPieceId(data_set_id).call())

    def get_scheduled_removals(self, data_set_id: int):
        return self._contract.functions.getScheduledRemovals(data_set_id).call()

    def get_piece_cid(self, data_set_id: int, piece_id: int) -> bytes:
        # The contract returns a tuple; the first element is the raw CID bytes.
        result = self._contract.functions.getPieceCid(data_set_id, piece_id).call()
        return result[0]

    def piece_live(self, data_set_id: int, piece_id: int) -> bool:
        return bool(self._contract.functions.pieceLive(data_set_id, piece_id).call())


class AsyncPDPVerifier:
    """Read-only view over the on-chain PDPVerifier contract (async)."""

    def __init__(self, web3: AsyncWeb3, chain: Chain) -> None:
        self._web3 = web3
        self._chain = chain
        self._contract = web3.eth.contract(address=chain.contracts.pdp_verifier, abi=PDP_VERIFIER_ABI)

    async def data_set_live(self, data_set_id: int) -> bool:
        return bool(await self._contract.functions.dataSetLive(data_set_id).call())

    async def get_active_piece_count(self, data_set_id: int) -> int:
        return int(await self._contract.functions.getActivePieceCount(data_set_id).call())

    async def get_active_pieces(self, data_set_id: int, offset: int, limit: int):
        return await self._contract.functions.getActivePieces(data_set_id, offset, limit).call()

    async def get_data_set_leaf_count(self, data_set_id: int) -> int:
        return int(await self._contract.functions.getDataSetLeafCount(data_set_id).call())

    async def get_data_set_listener(self, data_set_id: int) -> str:
        return await self._contract.functions.getDataSetListener(data_set_id).call()

    async def get_data_set_storage_provider(self, data_set_id: int) -> str:
        return await self._contract.functions.getDataSetStorageProvider(data_set_id).call()

    async def get_next_piece_id(self, data_set_id: int) -> int:
        return int(await self._contract.functions.getNextPieceId(data_set_id).call())

    async def get_scheduled_removals(self, data_set_id: int):
        return await self._contract.functions.getScheduledRemovals(data_set_id).call()

    async def get_piece_cid(self, data_set_id: int, piece_id: int) -> bytes:
        # The contract returns a tuple; the first element is the raw CID bytes.
        result = await self._contract.functions.getPieceCid(data_set_id, piece_id).call()
        return result[0]

    async def piece_live(self, data_set_id: int, piece_id: int) -> bool:
        return bool(await self._contract.functions.pieceLive(data_set_id, piece_id).call())
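The wrappers only read from the verifier contract and only need an object exposing contracts.pdp_verifier. A minimal sketch of the sync variant; the RPC URL and zero address below are placeholders (in real use, the Chain configurations come from pynapse.core.chains):

from web3 import Web3

from pynapse.pdp import SyncPDPVerifier


# Stand-in for pynapse.core.chains.Chain: anything with .contracts.pdp_verifier works here.
class _Contracts:
    pdp_verifier = "0x0000000000000000000000000000000000000000"  # placeholder address


class _Chain:
    contracts = _Contracts()


w3 = Web3(Web3.HTTPProvider("https://example-rpc.invalid/rpc/v1"))  # placeholder RPC URL
verifier = SyncPDPVerifier(w3, _Chain())

if verifier.data_set_live(1):
    print(verifier.get_active_piece_count(1), "active pieces in data set 1")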
pynapse/retriever/__init__.py
ADDED
@@ -0,0 +1,12 @@
"""
PieceRetriever implementations for flexible piece fetching.

This module provides different strategies for retrieving pieces:
- ChainRetriever: Queries on-chain data to find providers (sync)
- AsyncChainRetriever: Async version for Python async/await patterns
"""

from .chain import ChainRetriever
from .async_chain import AsyncChainRetriever

__all__ = ["ChainRetriever", "AsyncChainRetriever"]