synapse-filecoin-sdk 0.1.0 (py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pynapse/__init__.py +6 -0
- pynapse/_version.py +1 -0
- pynapse/contracts/__init__.py +34 -0
- pynapse/contracts/abi_registry.py +11 -0
- pynapse/contracts/addresses.json +30 -0
- pynapse/contracts/erc20_abi.json +92 -0
- pynapse/contracts/errorsAbi.json +933 -0
- pynapse/contracts/filecoinPayV1Abi.json +2424 -0
- pynapse/contracts/filecoinWarmStorageServiceAbi.json +2363 -0
- pynapse/contracts/filecoinWarmStorageServiceStateViewAbi.json +651 -0
- pynapse/contracts/generated.py +35 -0
- pynapse/contracts/payments_abi.json +205 -0
- pynapse/contracts/pdpVerifierAbi.json +1266 -0
- pynapse/contracts/providerIdSetAbi.json +161 -0
- pynapse/contracts/serviceProviderRegistryAbi.json +1479 -0
- pynapse/contracts/sessionKeyRegistryAbi.json +147 -0
- pynapse/core/__init__.py +68 -0
- pynapse/core/abis.py +25 -0
- pynapse/core/chains.py +97 -0
- pynapse/core/constants.py +27 -0
- pynapse/core/errors.py +22 -0
- pynapse/core/piece.py +263 -0
- pynapse/core/rand.py +14 -0
- pynapse/core/typed_data.py +320 -0
- pynapse/core/utils.py +30 -0
- pynapse/evm/__init__.py +3 -0
- pynapse/evm/client.py +26 -0
- pynapse/filbeam/__init__.py +3 -0
- pynapse/filbeam/service.py +39 -0
- pynapse/payments/__init__.py +17 -0
- pynapse/payments/service.py +826 -0
- pynapse/pdp/__init__.py +21 -0
- pynapse/pdp/server.py +331 -0
- pynapse/pdp/types.py +38 -0
- pynapse/pdp/verifier.py +82 -0
- pynapse/retriever/__init__.py +12 -0
- pynapse/retriever/async_chain.py +227 -0
- pynapse/retriever/chain.py +209 -0
- pynapse/session/__init__.py +12 -0
- pynapse/session/key.py +30 -0
- pynapse/session/permissions.py +57 -0
- pynapse/session/registry.py +90 -0
- pynapse/sp_registry/__init__.py +11 -0
- pynapse/sp_registry/capabilities.py +25 -0
- pynapse/sp_registry/pdp_capabilities.py +102 -0
- pynapse/sp_registry/service.py +446 -0
- pynapse/sp_registry/types.py +52 -0
- pynapse/storage/__init__.py +57 -0
- pynapse/storage/async_context.py +682 -0
- pynapse/storage/async_manager.py +757 -0
- pynapse/storage/context.py +680 -0
- pynapse/storage/manager.py +758 -0
- pynapse/synapse.py +191 -0
- pynapse/utils/__init__.py +25 -0
- pynapse/utils/constants.py +25 -0
- pynapse/utils/errors.py +3 -0
- pynapse/utils/metadata.py +35 -0
- pynapse/utils/piece_url.py +16 -0
- pynapse/warm_storage/__init__.py +13 -0
- pynapse/warm_storage/service.py +513 -0
- synapse_filecoin_sdk-0.1.0.dist-info/METADATA +74 -0
- synapse_filecoin_sdk-0.1.0.dist-info/RECORD +64 -0
- synapse_filecoin_sdk-0.1.0.dist-info/WHEEL +4 -0
- synapse_filecoin_sdk-0.1.0.dist-info/licenses/LICENSE.md +228 -0
pynapse/retriever/async_chain.py
ADDED
@@ -0,0 +1,227 @@
"""
AsyncChainRetriever - Async queries to on-chain data for piece retrieval.

This retriever uses the Async Warm Storage service to find service providers
that have the requested piece, then attempts to download from them.
"""
from __future__ import annotations

import asyncio
from typing import List, Optional, Protocol, TYPE_CHECKING

import httpx

if TYPE_CHECKING:
    from pynapse.sp_registry import AsyncSPRegistryService, ProviderInfo
    from pynapse.warm_storage import AsyncWarmStorageService


class AsyncPieceRetriever(Protocol):
    """Protocol for async piece retrieval implementations."""

    async def fetch_piece(
        self,
        piece_cid: str,
        client_address: str,
        provider_address: Optional[str] = None,
    ) -> bytes:
        """Fetch a piece by CID asynchronously."""
        ...


class AsyncChainRetriever:
    """
    Async retriever that queries on-chain data to find providers with the piece.

    This is the standard async retriever that:
    1. Looks up the client's datasets to find active providers
    2. Tries to download from each provider until one succeeds

    Example:
        retriever = AsyncChainRetriever(warm_storage, sp_registry)
        data = await retriever.fetch_piece(piece_cid, client_address)
    """

    def __init__(
        self,
        warm_storage: "AsyncWarmStorageService",
        sp_registry: "AsyncSPRegistryService",
        fallback_retriever: Optional[AsyncPieceRetriever] = None,
        timeout: float = 30.0,
    ) -> None:
        self._warm_storage = warm_storage
        self._sp_registry = sp_registry
        self._fallback = fallback_retriever
        self._timeout = timeout

    async def _get_pdp_endpoint(self, provider_id: int) -> Optional[str]:
        """Get PDP service URL for a provider."""
        try:
            product = await self._sp_registry.get_provider_with_product(provider_id, 1)
            for i, key in enumerate(product.product.capability_keys):
                if key == "serviceURL" and i < len(product.product_capability_values):
                    return product.product_capability_values[i]
        except Exception:
            pass
        return None

    async def _find_providers(
        self,
        client_address: str,
        provider_address: Optional[str] = None,
    ) -> List["ProviderInfo"]:
        """Find providers that can serve pieces for a client."""

        if provider_address is not None:
            # Direct provider case
            provider = await self._sp_registry.get_provider_by_address(provider_address)
            if provider is None:
                raise ValueError(f"Provider {provider_address} not found in registry")
            return [provider]

        # Get client's datasets with details
        datasets = await self._warm_storage.get_client_data_sets_with_details(client_address)

        # Filter for live datasets with pieces
        valid_datasets = [
            ds for ds in datasets
            if ds.is_live and ds.active_piece_count > 0
        ]

        if not valid_datasets:
            raise ValueError(f"No active datasets with data found for client {client_address}")

        # Get unique provider IDs
        unique_provider_ids = list(set(ds.provider_id for ds in valid_datasets))

        # Fetch provider info for each concurrently
        async def get_provider_if_active(pid: int) -> Optional["ProviderInfo"]:
            try:
                provider = await self._sp_registry.get_provider(pid)
                if provider and provider.is_active:
                    return provider
            except Exception:
                pass
            return None

        results = await asyncio.gather(*[get_provider_if_active(pid) for pid in unique_provider_ids])
        providers = [p for p in results if p is not None]

        if not providers:
            raise ValueError("No valid providers found")

        return providers

    async def _try_fetch_from_provider(
        self,
        provider: "ProviderInfo",
        piece_cid: str,
    ) -> Optional[bytes]:
        """Try to fetch a piece from a specific provider."""
        endpoint = await self._get_pdp_endpoint(provider.provider_id)
        if not endpoint:
            return None

        try:
            async with httpx.AsyncClient(timeout=self._timeout) as client:
                # First check if provider has the piece
                find_resp = await client.get(
                    f"{endpoint.rstrip('/')}/pdp/piece",
                    params={"pieceCid": piece_cid},
                )
                if find_resp.status_code != 200:
                    return None

                # Download the piece
                download_resp = await client.get(
                    f"{endpoint.rstrip('/')}/pdp/piece/{piece_cid}",
                )
                if download_resp.status_code == 200:
                    return download_resp.content

        except Exception:
            pass

        return None

    async def fetch_piece(
        self,
        piece_cid: str,
        client_address: str,
        provider_address: Optional[str] = None,
        parallel: bool = True,
    ) -> bytes:
        """
        Fetch a piece by CID.

        Args:
            piece_cid: The piece CID to fetch
            client_address: The client address to look up datasets for
            provider_address: Optional specific provider address
            parallel: Whether to try providers in parallel (default: True)

        Returns:
            The piece data as bytes

        Raises:
            ValueError: If piece cannot be found
        """
        providers = await self._find_providers(client_address, provider_address)

        if parallel:
            # Try all providers in parallel, return first success
            async def try_provider(provider: "ProviderInfo") -> Optional[bytes]:
                return await self._try_fetch_from_provider(provider, piece_cid)

            # Use asyncio.as_completed for first success
            tasks = [asyncio.create_task(try_provider(p)) for p in providers]

            for completed_task in asyncio.as_completed(tasks):
                try:
                    result = await completed_task
                    if result is not None:
                        # Cancel remaining tasks
                        for task in tasks:
                            task.cancel()
                        return result
                except Exception:
                    continue

        else:
            # Try providers sequentially
            for provider in providers:
                data = await self._try_fetch_from_provider(provider, piece_cid)
                if data is not None:
                    return data

        # Try fallback if configured
        if self._fallback is not None:
            return await self._fallback.fetch_piece(piece_cid, client_address, provider_address)

        raise ValueError(f"Piece {piece_cid} not found on any provider")

    async def fetch_pieces(
        self,
        piece_cids: List[str],
        client_address: str,
        provider_address: Optional[str] = None,
    ) -> List[bytes]:
        """
        Fetch multiple pieces concurrently.

        Args:
            piece_cids: List of piece CIDs to fetch
            client_address: The client address to look up datasets for
            provider_address: Optional specific provider address

        Returns:
            List of piece data as bytes (in same order as input CIDs)

        Raises:
            ValueError: If any piece cannot be found
        """
        tasks = [
            self.fetch_piece(cid, client_address, provider_address)
            for cid in piece_cids
        ]
        return list(await asyncio.gather(*tasks))
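A minimal usage sketch (not part of the package) of the async retriever above, assuming an AsyncWarmStorageService and AsyncSPRegistryService have already been constructed elsewhere; the piece CIDs and client address are placeholders.

    import asyncio

    from pynapse.retriever.async_chain import AsyncChainRetriever


    async def main(warm_storage, sp_registry) -> None:
        retriever = AsyncChainRetriever(warm_storage, sp_registry, timeout=60.0)

        # Race all known providers and return the first successful download.
        data = await retriever.fetch_piece(
            "baga6ea4seaq-placeholder",                    # placeholder piece CID
            "0x0000000000000000000000000000000000000001",  # placeholder client address
            parallel=True,
        )
        print(f"retrieved {len(data)} bytes")

        # Fetch several pieces concurrently; results preserve the input order.
        pieces = await retriever.fetch_pieces(
            ["baga6ea4seaq-placeholder-a", "baga6ea4seaq-placeholder-b"],
            "0x0000000000000000000000000000000000000001",
        )
        print([len(p) for p in pieces])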
pynapse/retriever/chain.py
ADDED
@@ -0,0 +1,209 @@
"""
ChainRetriever - Queries on-chain data to find and retrieve pieces.

This retriever uses the Warm Storage service to find service providers
that have the requested piece, then attempts to download from them.
"""
from __future__ import annotations

import concurrent.futures
from typing import List, Optional, Protocol, TYPE_CHECKING

import httpx

if TYPE_CHECKING:
    from pynapse.sp_registry import ProviderInfo
    from pynapse.warm_storage import SyncWarmStorageService


class PieceRetriever(Protocol):
    """Protocol for piece retrieval implementations."""

    def fetch_piece(
        self,
        piece_cid: str,
        client_address: str,
        provider_address: Optional[str] = None,
    ) -> bytes:
        """Fetch a piece by CID."""
        ...


class ChainRetriever:
    """
    Retriever that queries on-chain data to find providers with the piece.

    This is the standard retriever that:
    1. Looks up the client's datasets to find active providers
    2. Tries to download from each provider until one succeeds

    Example:
        retriever = ChainRetriever(warm_storage, sp_registry)
        data = retriever.fetch_piece(piece_cid, client_address)
    """

    def __init__(
        self,
        warm_storage: "SyncWarmStorageService",
        sp_registry,
        fallback_retriever: Optional[PieceRetriever] = None,
        timeout: float = 30.0,
    ) -> None:
        self._warm_storage = warm_storage
        self._sp_registry = sp_registry
        self._fallback = fallback_retriever
        self._timeout = timeout
        self._client = httpx.Client(timeout=timeout)

    def _get_pdp_endpoint(self, provider_id: int) -> Optional[str]:
        """Get PDP service URL for a provider."""
        try:
            product = self._sp_registry.get_provider_with_product(provider_id, 1)
            for i, key in enumerate(product.product.capability_keys):
                if key == "serviceURL" and i < len(product.product_capability_values):
                    return product.product_capability_values[i]
        except Exception:
            pass
        return None

    def _find_providers(
        self,
        client_address: str,
        provider_address: Optional[str] = None,
    ) -> List["ProviderInfo"]:
        """Find providers that can serve pieces for a client."""

        if provider_address is not None:
            # Direct provider case
            provider = self._sp_registry.get_provider_by_address(provider_address)
            if provider is None:
                raise ValueError(f"Provider {provider_address} not found in registry")
            return [provider]

        # Get client's datasets with details
        datasets = self._warm_storage.get_client_data_sets_with_details(client_address)

        # Filter for live datasets with pieces
        valid_datasets = [
            ds for ds in datasets
            if ds.is_live and ds.active_piece_count > 0
        ]

        if not valid_datasets:
            raise ValueError(f"No active datasets with data found for client {client_address}")

        # Get unique provider IDs
        unique_provider_ids = list(set(ds.provider_id for ds in valid_datasets))

        # Fetch provider info for each
        providers = []
        for pid in unique_provider_ids:
            try:
                provider = self._sp_registry.get_provider(pid)
                if provider and provider.is_active:
                    providers.append(provider)
            except Exception:
                continue

        if not providers:
            raise ValueError("No valid providers found")

        return providers

    def _try_fetch_from_provider(
        self,
        provider: "ProviderInfo",
        piece_cid: str,
    ) -> Optional[bytes]:
        """Try to fetch a piece from a specific provider."""
        endpoint = self._get_pdp_endpoint(provider.provider_id)
        if not endpoint:
            return None

        try:
            # First check if provider has the piece
            find_resp = self._client.get(
                f"{endpoint.rstrip('/')}/pdp/piece",
                params={"pieceCid": piece_cid},
            )
            if find_resp.status_code != 200:
                return None

            # Download the piece
            download_resp = self._client.get(
                f"{endpoint.rstrip('/')}/pdp/piece/{piece_cid}",
            )
            if download_resp.status_code == 200:
                return download_resp.content

        except Exception:
            pass

        return None

    def fetch_piece(
        self,
        piece_cid: str,
        client_address: str,
        provider_address: Optional[str] = None,
        parallel: bool = True,
    ) -> bytes:
        """
        Fetch a piece by CID, trying multiple providers if needed.

        Args:
            piece_cid: The piece CID to fetch
            client_address: The client address to look up datasets for
            provider_address: Optional specific provider to use
            parallel: Whether to try providers in parallel (default: True)

        Returns:
            The piece data as bytes

        Raises:
            ValueError: If piece cannot be found on any provider
        """
        try:
            providers = self._find_providers(client_address, provider_address)
        except ValueError as e:
            if self._fallback:
                return self._fallback.fetch_piece(piece_cid, client_address, provider_address)
            raise

        if parallel and len(providers) > 1:
            # Try all providers in parallel, return first success
            with concurrent.futures.ThreadPoolExecutor(max_workers=len(providers)) as executor:
                futures = {
                    executor.submit(self._try_fetch_from_provider, p, piece_cid): p
                    for p in providers
                }

                for future in concurrent.futures.as_completed(futures):
                    result = future.result()
                    if result is not None:
                        # Cancel remaining futures
                        for f in futures:
                            f.cancel()
                        return result
        else:
            # Try providers sequentially
            for provider in providers:
                result = self._try_fetch_from_provider(provider, piece_cid)
                if result is not None:
                    return result

        # All providers failed
        if self._fallback:
            return self._fallback.fetch_piece(piece_cid, client_address, provider_address)

        raise ValueError(f"Failed to retrieve piece {piece_cid} from any provider")

    def close(self) -> None:
        """Close the HTTP client."""
        self._client.close()

    def __enter__(self) -> "ChainRetriever":
        return self

    def __exit__(self, *args) -> None:
        self.close()
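A minimal synchronous sketch (not part of the package), assuming warm_storage and sp_registry are already-constructed sync services; the retriever owns an httpx.Client, so the context manager form closes it automatically.

    from pynapse.retriever.chain import ChainRetriever


    def download_piece(warm_storage, sp_registry, piece_cid: str, client_address: str) -> bytes:
        with ChainRetriever(warm_storage, sp_registry, timeout=30.0) as retriever:
            # parallel=False walks the providers one at a time instead of racing them.
            return retriever.fetch_piece(piece_cid, client_address, parallel=False)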
pynapse/session/__init__.py
ADDED
@@ -0,0 +1,12 @@
from .key import SessionKey
from .permissions import ALL_PERMISSIONS, SESSION_KEY_PERMISSIONS, get_permission_from_type_hash
from .registry import AsyncSessionKeyRegistry, SyncSessionKeyRegistry

__all__ = [
    "SessionKey",
    "ALL_PERMISSIONS",
    "SESSION_KEY_PERMISSIONS",
    "get_permission_from_type_hash",
    "AsyncSessionKeyRegistry",
    "SyncSessionKeyRegistry",
]
pynapse/session/key.py
ADDED
@@ -0,0 +1,30 @@
from __future__ import annotations

from dataclasses import dataclass
from typing import Dict, Iterable, List, Optional

from pynapse.core.chains import Chain
from .permissions import ALL_PERMISSIONS
from .registry import SyncSessionKeyRegistry


class SessionKey:
    def __init__(self, chain: Chain, registry: SyncSessionKeyRegistry, owner_address: str, session_key_address: str) -> None:
        self._chain = chain
        self._registry = registry
        self._owner_address = owner_address
        self._session_key_address = session_key_address

    def fetch_expiries(self, permissions: Iterable[str] = ALL_PERMISSIONS) -> Dict[str, int]:
        expiries: Dict[str, int] = {}
        for permission in permissions:
            expiries[permission] = self._registry.authorization_expiry(
                self._owner_address, self._session_key_address, permission
            )
        return expiries

    def login(self, expiry: int, permissions: Iterable[str] = ALL_PERMISSIONS, origin: str = "unknown") -> str:
        return self._registry.login(self._owner_address, self._session_key_address, expiry, permissions, origin)

    def revoke(self, permissions: Iterable[str] = ALL_PERMISSIONS, origin: str = "unknown") -> str:
        return self._registry.revoke(self._owner_address, self._session_key_address, permissions, origin)
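A hypothetical sketch (not part of the package) of how SessionKey wraps a registry for one owner/session-key pair; the addresses are placeholders and the expiry is assumed to be a unix timestamp.

    import time

    from pynapse.session import SessionKey


    def grant_upload_permissions(chain, registry, owner: str, session_key: str) -> None:
        sk = SessionKey(chain, registry, owner, session_key)

        # Authorize only dataset creation and piece uploads for 24 hours;
        # returns the transaction hash of the login call.
        tx_hash = sk.login(
            expiry=int(time.time()) + 24 * 60 * 60,
            permissions=["CreateDataSet", "AddPieces"],
            origin="example-app",
        )
        print("login tx:", tx_hash)

        # Expiries are reported per permission name.
        print(sk.fetch_expiries(["CreateDataSet", "AddPieces", "DeleteDataSet"]))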
pynapse/session/permissions.py
ADDED
@@ -0,0 +1,57 @@
from __future__ import annotations

from typing import Dict, List

from eth_utils import keccak

from pynapse.core.typed_data import EIP712_TYPES

SessionKeyPermission = str

ALL_PERMISSIONS: List[SessionKeyPermission] = [
    "CreateDataSet",
    "AddPieces",
    "SchedulePieceRemovals",
    "DeleteDataSet",
]


def _dependencies(types: Dict[str, List[Dict[str, str]]], primary: str) -> List[str]:
    deps = []
    for field in types[primary]:
        t = field["type"].replace("[]", "")
        if t in types and t not in deps and t != primary:
            deps.append(t)
            deps.extend([d for d in _dependencies(types, t) if d not in deps])
    return deps


def _encode_type(types: Dict[str, List[Dict[str, str]]], primary: str) -> str:
    deps = _dependencies(types, primary)
    deps = sorted(deps)
    type_list = [primary] + deps
    parts = []
    for t in type_list:
        fields = ",".join([f"{f['type']} {f['name']}" for f in types[t]])
        parts.append(f"{t}({fields})")
    return "".join(parts)


def type_hash(primary_type: str) -> str:
    encoded = _encode_type(EIP712_TYPES, primary_type)
    return "0x" + keccak(text=encoded).hex()


SESSION_KEY_PERMISSIONS: Dict[SessionKeyPermission, str] = {
    "CreateDataSet": type_hash("CreateDataSet"),
    "AddPieces": type_hash("AddPieces"),
    "SchedulePieceRemovals": type_hash("SchedulePieceRemovals"),
    "DeleteDataSet": type_hash("DeleteDataSet"),
}


def get_permission_from_type_hash(type_hash_value: str) -> SessionKeyPermission:
    for perm, h in SESSION_KEY_PERMISSIONS.items():
        if h.lower() == type_hash_value.lower():
            return perm
    raise ValueError(f"Permission not found for type hash: {type_hash_value}")
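A short round-trip sketch of the module above: SESSION_KEY_PERMISSIONS maps each permission name to the keccak256 hash of its EIP-712 type string (built from EIP712_TYPES), and get_permission_from_type_hash inverts that mapping case-insensitively.

    from pynapse.session.permissions import (
        ALL_PERMISSIONS,
        SESSION_KEY_PERMISSIONS,
        get_permission_from_type_hash,
    )

    for name in ALL_PERMISSIONS:
        digest = SESSION_KEY_PERMISSIONS[name]  # "0x"-prefixed keccak256 hex digest
        assert get_permission_from_type_hash(digest) == name
        assert get_permission_from_type_hash(digest.upper()) == name  # lookup is case-insensitive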
pynapse/session/registry.py
ADDED
@@ -0,0 +1,90 @@
from __future__ import annotations

from typing import Dict, Iterable, List, Optional

from eth_account import Account
from web3 import AsyncWeb3, Web3

from pynapse.contracts import SESSION_KEY_REGISTRY_ABI
from pynapse.core.chains import Chain
from .permissions import SESSION_KEY_PERMISSIONS


class SyncSessionKeyRegistry:
    def __init__(self, web3: Web3, chain: Chain, private_key: Optional[str] = None) -> None:
        self._web3 = web3
        self._chain = chain
        self._private_key = private_key
        self._contract = web3.eth.contract(address=chain.contracts.session_key_registry, abi=SESSION_KEY_REGISTRY_ABI)

    def authorization_expiry(self, address: str, session_key_address: str, permission: str) -> int:
        perm = SESSION_KEY_PERMISSIONS[permission]
        return int(self._contract.functions.authorizationExpiry(address, session_key_address, perm).call())

    def login(self, account: str, session_key_address: str, expires_at: int, permissions: Iterable[str], origin: str) -> str:
        if not self._private_key:
            raise ValueError("private_key required")
        perm_hashes = [SESSION_KEY_PERMISSIONS[p] for p in permissions]
        txn = self._contract.functions.login(session_key_address, expires_at, perm_hashes, origin).build_transaction(
            {
                "from": account,
                "nonce": self._web3.eth.get_transaction_count(account),
            }
        )
        signed = self._web3.eth.account.sign_transaction(txn, private_key=self._private_key)
        tx_hash = self._web3.eth.send_raw_transaction(signed.rawTransaction)
        return tx_hash.hex()

    def revoke(self, account: str, session_key_address: str, permissions: Iterable[str], origin: str) -> str:
        if not self._private_key:
            raise ValueError("private_key required")
        perm_hashes = [SESSION_KEY_PERMISSIONS[p] for p in permissions]
        txn = self._contract.functions.revoke(session_key_address, perm_hashes, origin).build_transaction(
            {
                "from": account,
                "nonce": self._web3.eth.get_transaction_count(account),
            }
        )
        signed = self._web3.eth.account.sign_transaction(txn, private_key=self._private_key)
        tx_hash = self._web3.eth.send_raw_transaction(signed.rawTransaction)
        return tx_hash.hex()


class AsyncSessionKeyRegistry:
    def __init__(self, web3: AsyncWeb3, chain: Chain, private_key: Optional[str] = None) -> None:
        self._web3 = web3
        self._chain = chain
        self._private_key = private_key
        self._contract = web3.eth.contract(address=chain.contracts.session_key_registry, abi=SESSION_KEY_REGISTRY_ABI)

    async def authorization_expiry(self, address: str, session_key_address: str, permission: str) -> int:
        perm = SESSION_KEY_PERMISSIONS[permission]
        return int(await self._contract.functions.authorizationExpiry(address, session_key_address, perm).call())

    async def login(self, account: str, session_key_address: str, expires_at: int, permissions: Iterable[str], origin: str) -> str:
        if not self._private_key:
            raise ValueError("private_key required")
        perm_hashes = [SESSION_KEY_PERMISSIONS[p] for p in permissions]
        txn = await self._contract.functions.login(session_key_address, expires_at, perm_hashes, origin).build_transaction(
            {
                "from": account,
                "nonce": await self._web3.eth.get_transaction_count(account),
            }
        )
        signed = Account.sign_transaction(txn, private_key=self._private_key)
        tx_hash = await self._web3.eth.send_raw_transaction(signed.rawTransaction)
        return tx_hash.hex()

    async def revoke(self, account: str, session_key_address: str, permissions: Iterable[str], origin: str) -> str:
        if not self._private_key:
            raise ValueError("private_key required")
        perm_hashes = [SESSION_KEY_PERMISSIONS[p] for p in permissions]
        txn = await self._contract.functions.revoke(session_key_address, perm_hashes, origin).build_transaction(
            {
                "from": account,
                "nonce": await self._web3.eth.get_transaction_count(account),
            }
        )
        signed = Account.sign_transaction(txn, private_key=self._private_key)
        tx_hash = await self._web3.eth.send_raw_transaction(signed.rawTransaction)
        return tx_hash.hex()
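A hypothetical wiring sketch (not part of the package) for SyncSessionKeyRegistry: the RPC URL, private key, and addresses are placeholders, the Chain object is assumed to come from pynapse.core.chains with a session_key_registry address configured, and expires_at is assumed to be a unix timestamp.

    import time

    from web3 import Web3

    from pynapse.session.registry import SyncSessionKeyRegistry


    def authorize_session_key(chain, rpc_url: str, private_key: str,
                              owner: str, session_key: str) -> str:
        w3 = Web3(Web3.HTTPProvider(rpc_url))
        registry = SyncSessionKeyRegistry(w3, chain, private_key=private_key)

        # Grant the AddPieces permission for one hour; returns the tx hash.
        return registry.login(
            account=owner,
            session_key_address=session_key,
            expires_at=int(time.time()) + 3600,
            permissions=["AddPieces"],
            origin="example",
        )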
pynapse/sp_registry/__init__.py
ADDED
@@ -0,0 +1,11 @@
from .service import AsyncSPRegistryService, SyncSPRegistryService
from .types import PDPOffering, ProviderInfo, ProviderRegistrationInfo, ProviderWithProduct

__all__ = [
    "AsyncSPRegistryService",
    "SyncSPRegistryService",
    "ProviderInfo",
    "ProviderWithProduct",
    "ProviderRegistrationInfo",
    "PDPOffering",
]
pynapse/sp_registry/capabilities.py
ADDED
@@ -0,0 +1,25 @@
from __future__ import annotations

from typing import Dict, List

from web3 import Web3


def capabilities_list_to_object(keys: List[str], values: List[bytes]) -> Dict[str, str]:
    capabilities: Dict[str, str] = {}
    for key, value in zip(keys, values):
        capabilities[key] = Web3.to_hex(value)
    return capabilities


def decode_address_capability(capability_value: bytes | str) -> str:
    if isinstance(capability_value, str):
        hex_value = capability_value
    else:
        hex_value = Web3.to_hex(capability_value)

    if len(hex_value) > 42:
        return "0x" + hex_value[-40:]
    if len(hex_value) < 42:
        return Web3.to_checksum_address(hex_value.rjust(42, "0"))
    return Web3.to_checksum_address(hex_value)