hippius 0.1.14__py3-none-any.whl → 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {hippius-0.1.14.dist-info → hippius-0.2.0.dist-info}/METADATA +2 -2
- hippius-0.2.0.dist-info/RECORD +12 -0
- hippius_sdk/__init__.py +5 -1
- hippius_sdk/cli.py +397 -47
- hippius_sdk/client.py +20 -22
- hippius_sdk/config.py +19 -26
- hippius_sdk/ipfs.py +62 -408
- hippius_sdk/ipfs_core.py +216 -0
- hippius_sdk/substrate.py +233 -59
- hippius_sdk/utils.py +152 -0
- hippius-0.1.14.dist-info/RECORD +0 -10
- {hippius-0.1.14.dist-info → hippius-0.2.0.dist-info}/WHEEL +0 -0
- {hippius-0.1.14.dist-info → hippius-0.2.0.dist-info}/entry_points.txt +0 -0
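The headline change in this release: `IPFSClient`'s operations (`upload_file`, `upload_directory`, `download_file`, `cat`, `exists`, `pin`, and the erasure-coding methods) are now coroutines backed by `httpx`, replacing the blocking `ipfshttpclient`/`requests` paths. A minimal migration sketch, assuming default client configuration; `asyncio.run` is one way to drive the new API from synchronous code:

import asyncio

from hippius_sdk.ipfs import IPFSClient

# 0.1.x (blocking):   result = IPFSClient().upload_file("example.txt")
# 0.2.0 (async):
async def main() -> None:
    client = IPFSClient()
    result = await client.upload_file("example.txt")
    print(result)  # dict describing the upload, including the CID

asyncio.run(main())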
hippius_sdk/ipfs.py
CHANGED
@@ -2,20 +2,22 @@
 IPFS operations for the Hippius SDK.
 """
 
-import base64
 import hashlib
 import json
 import os
+import shutil
 import tempfile
 import time
 import uuid
-from typing import Any, Dict, List, Optional
+from typing import Any, Dict, List, Optional
 
-import ipfshttpclient
+import httpx
 import requests
-from dotenv import load_dotenv
 
 from hippius_sdk.config import get_config_value, get_encryption_key
+from hippius_sdk.ipfs_core import AsyncIPFSClient
+from hippius_sdk.substrate import FileInput, SubstrateClient
+from hippius_sdk.utils import format_cid, format_size
 
 # Import PyNaCl for encryption
 try:
@@ -70,39 +72,17 @@ class IPFSClient:
 
         self.gateway = gateway.rstrip("/")
         self.api_url = api_url
-        self.client = None
 
         # Extract base URL from API URL for HTTP fallback
         self.base_url = api_url
 
-
-
-
-
-
-
-                else:
-                    # For regular HTTP URLs, we'll use the HTTP API directly
-                    print(f"Using HTTP API at {api_url} for IPFS operations")
-            except ipfshttpclient.exceptions.ConnectionError as e:
-                print(f"Warning: Could not connect to IPFS node at {api_url}: {e}")
-                print(f"Falling back to HTTP API for uploads")
-                # We'll use HTTP API fallback for uploads
-                try:
-                    # Try to connect to local IPFS daemon as fallback
-                    self.client = ipfshttpclient.connect()
-                except ipfshttpclient.exceptions.ConnectionError:
-                    # No IPFS connection available, but HTTP API fallback will be used
-                    pass
-        else:
-            try:
-                # Try to connect to local IPFS daemon
-                self.client = ipfshttpclient.connect()
-            except ipfshttpclient.exceptions.ConnectionError:
-                # No local IPFS daemon connection available
-                pass
+        try:
+            self.client = AsyncIPFSClient(api_url)
+        except httpx.ConnectError as e:
+            print(f"Warning: Could not connect to IPFS node at {api_url}: {e}")
+            # Try to connect to local IPFS daemon as fallback
+            self.client = AsyncIPFSClient()
 
-        # Initialize encryption settings
         self._initialize_encryption(encrypt_by_default, encryption_key)
 
     def _initialize_encryption(
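The constructor rewrite above replaces the nested ipfshttpclient fallback ladder with a single try/except around the new `AsyncIPFSClient` from `hippius_sdk/ipfs_core.py`. The same pattern in isolation (a sketch; the no-argument constructor is assumed to target the local daemon, as the fallback comment suggests):

import httpx

from hippius_sdk.ipfs_core import AsyncIPFSClient

def connect_with_fallback(api_url: str) -> AsyncIPFSClient:
    """Connect to the configured IPFS node, falling back to the local daemon."""
    try:
        return AsyncIPFSClient(api_url)
    except httpx.ConnectError as e:
        print(f"Warning: Could not connect to IPFS node at {api_url}: {e}")
        return AsyncIPFSClient()  # assumed to default to the local daemon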
@@ -140,7 +120,7 @@ class IPFSClient:
        # If encryption is requested but not available, warn the user
        if self.encrypt_by_default and not self.encryption_available:
            print(
-
+                "Warning: Encryption requested but not available. Check that PyNaCl is installed and a valid encryption key is provided."
            )
 
    def encrypt_data(self, data: bytes) -> bytes:
@@ -202,102 +182,7 @@ class IPFSClient:
                f"Decryption failed: {str(e)}. Incorrect key or corrupted data?"
            )
 
-    def _upload_via_http_api(self, file_path: str, max_retries: int = 3) -> str:
-        """
-        Upload a file to IPFS using the HTTP API.
-
-        This is a fallback method when ipfshttpclient is not available.
-
-        Args:
-            file_path: Path to the file to upload
-            max_retries: Maximum number of retry attempts (default: 3)
-
-        Returns:
-            str: Content Identifier (CID) of the uploaded file
-
-        Raises:
-            ConnectionError: If the upload fails
-        """
-        if not self.base_url:
-            raise ConnectionError("No IPFS API URL provided for HTTP upload")
-
-        # Retry logic
-        retries = 0
-        last_error = None
-
-        while retries < max_retries:
-            try:
-                # Show progress for large files
-                file_size = os.path.getsize(file_path)
-                if file_size > 1024 * 1024:  # If file is larger than 1MB
-                    print(f"  Uploading {file_size/1024/1024:.2f} MB file...")
-
-                # Prepare the file for upload
-                with open(file_path, "rb") as file:
-                    files = {
-                        "file": (
-                            os.path.basename(file_path),
-                            file,
-                            "application/octet-stream",
-                        )
-                    }
-
-                    # Make HTTP POST request to the IPFS HTTP API with a timeout
-                    print(
-                        f"  Sending request to {self.base_url}/api/v0/add... (attempt {retries+1}/{max_retries})"
-                    )
-                    upload_url = f"{self.base_url}/api/v0/add"
-                    response = requests.post(
-                        upload_url,
-                        files=files,
-                        timeout=120,  # 2 minute timeout for uploads
-                    )
-                    response.raise_for_status()
-
-                    # Parse the response JSON
-                    result = response.json()
-                    print(f"  Upload successful! CID: {result['Hash']}")
-                    return result["Hash"]
-
-            except (
-                requests.exceptions.Timeout,
-                requests.exceptions.ConnectionError,
-                requests.exceptions.RequestException,
-            ) as e:
-                # Save the error and retry
-                last_error = e
-                retries += 1
-                wait_time = 2**retries  # Exponential backoff: 2, 4, 8 seconds
-                print(f"  Upload attempt {retries} failed: {str(e)}")
-                if retries < max_retries:
-                    print(f"  Retrying in {wait_time} seconds...")
-                    time.sleep(wait_time)
-            except Exception as e:
-                # For other exceptions, don't retry
-                raise ConnectionError(f"Failed to upload file via HTTP API: {str(e)}")
-
-        # If we've exhausted all retries
-        if last_error:
-            error_type = type(last_error).__name__
-            if isinstance(last_error, requests.exceptions.Timeout):
-                raise ConnectionError(
-                    f"Timeout when uploading to {self.base_url} after {max_retries} attempts. The server is not responding."
-                )
-            elif isinstance(last_error, requests.exceptions.ConnectionError):
-                raise ConnectionError(
-                    f"Failed to connect to IPFS node at {self.base_url} after {max_retries} attempts: {str(last_error)}"
-                )
-            else:
-                raise ConnectionError(
-                    f"Failed to upload file via HTTP API after {max_retries} attempts. Last error ({error_type}): {str(last_error)}"
-                )
-
-        # This should never happen, but just in case
-        raise ConnectionError(
-            f"Failed to upload file to {self.base_url} after {max_retries} attempts for unknown reasons."
-        )
-
-    def upload_file(
+    async def upload_file(
        self,
        file_path: str,
        include_formatted_size: bool = True,
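The deleted `_upload_via_http_api` helper wrapped a multipart POST to `/api/v0/add` in a retry loop with exponential backoff (2, 4, 8 seconds). Its core logic, condensed into a standalone sketch for reference:

import time

import requests

def upload_with_retries(base_url: str, file_path: str, max_retries: int = 3) -> str:
    """POST a file to the IPFS HTTP API, retrying with exponential backoff."""
    last_error = None
    for attempt in range(1, max_retries + 1):
        try:
            with open(file_path, "rb") as f:
                response = requests.post(
                    f"{base_url}/api/v0/add", files={"file": f}, timeout=120
                )
            response.raise_for_status()
            return response.json()["Hash"]
        except requests.exceptions.RequestException as e:
            last_error = e
            if attempt < max_retries:
                time.sleep(2**attempt)  # exponential backoff: 2s, then 4s
    raise ConnectionError(f"Upload failed after {max_retries} attempts: {last_error}")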
@@ -364,19 +249,8 @@ class IPFSClient:
                # Use the original file for upload
                upload_path = file_path
 
-
-
-                # Use IPFS client
-                result = self.client.add(upload_path)
-                cid = result["Hash"]
-            elif self.base_url:
-                # Fallback to using HTTP API
-                cid = self._upload_via_http_api(upload_path, max_retries=max_retries)
-            else:
-                # No connection or API URL available
-                raise ConnectionError(
-                    "No IPFS connection available. Please provide a valid api_url or ensure a local IPFS daemon is running."
-                )
+            result = await self.client.add_file(upload_path)
+            cid = result["Hash"]
 
        finally:
            # Clean up temporary file if created
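Since `upload_file` is now a coroutine, one practical payoff is concurrency: several uploads can be in flight on a single event loop. A sketch, assuming default client configuration:

import asyncio

from hippius_sdk.ipfs import IPFSClient

async def upload_many(paths: list[str]) -> list:
    client = IPFSClient()
    # Schedule all uploads at once; the event loop interleaves the requests.
    return await asyncio.gather(*(client.upload_file(p) for p in paths))

results = asyncio.run(upload_many(["a.txt", "b.txt"]))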
@@ -397,7 +271,7 @@ class IPFSClient:
 
        return result
 
-    def upload_directory(
+    async def upload_directory(
        self,
        dir_path: str,
        include_formatted_size: bool = True,
@@ -471,20 +345,13 @@ class IPFSClient:
                        total_size_bytes += os.path.getsize(file_path)
 
                # Use temp_dir instead of dir_path for upload
-
-
-
-                        cid = result[-1]["Hash"]
-                    else:
-                        cid = result["Hash"]
-                elif self.base_url:
-                    cid = self._upload_directory_via_http_api(temp_dir)
+                result = await self.client.add_directory(temp_dir)
+                if isinstance(result, list):
+                    cid = result[-1]["Hash"]
                else:
-
+                    cid = result["Hash"]
            finally:
                # Clean up the temporary directory
-                import shutil
-
                shutil.rmtree(temp_dir, ignore_errors=True)
        else:
            # Get directory info
@@ -503,22 +370,13 @@ class IPFSClient:
                    pass
 
            # Upload to IPFS
-
-
-
-
-
-                    cid = result[-1]["Hash"]
-                else:
-                    cid = result["Hash"]
-            elif self.base_url:
-                # Fallback to using HTTP API
-                cid = self._upload_directory_via_http_api(dir_path)
+
+            result = await self.client.add_directory(dir_path)
+            if isinstance(result, list):
+                # Get the last item, which should be the directory itself
+                cid = result[-1]["Hash"]
            else:
-
-                raise ConnectionError(
-                    "No IPFS connection available. Please provide a valid api_url or ensure a local IPFS daemon is running."
-                )
+                cid = result["Hash"]
 
        # Get dirname in case it wasn't set (for encryption path)
        dirname = os.path.basename(dir_path)
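Both directory branches now funnel through the same unwrapping: `add_directory` may return a single object or a list with one entry per file, where the final entry is the enclosing directory. That unwrapping as a standalone sketch:

from typing import Any, Dict, List, Union

def extract_directory_cid(result: Union[Dict[str, Any], List[Dict[str, Any]]]) -> str:
    """Pull the directory CID out of an IPFS add response."""
    if isinstance(result, list):
        # One entry per uploaded file; the wrapping directory is reported last.
        return result[-1]["Hash"]
    return result["Hash"]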
@@ -538,120 +396,6 @@ class IPFSClient:
 
        return result
 
-    def _upload_directory_via_http_api(
-        self, dir_path: str, max_retries: int = 3
-    ) -> str:
-        """
-        Upload a directory to IPFS using the HTTP API.
-
-        This is a limited implementation and may not support all directory features.
-
-        Args:
-            dir_path: Path to the directory to upload
-            max_retries: Maximum number of retry attempts (default: 3)
-
-        Returns:
-            str: Content Identifier (CID) of the uploaded directory
-
-        Raises:
-            ConnectionError: If the upload fails
-        """
-        if not self.base_url:
-            raise ConnectionError("No IPFS API URL provided for HTTP upload")
-
-        # Retry logic
-        retries = 0
-        last_error = None
-
-        while retries < max_retries:
-            try:
-                # This is a simplified approach - we'll upload the directory with recursive flag
-                files = []
-
-                print(f"  Preparing directory contents for upload...")
-                # Collect all files in the directory
-                for root, _, filenames in os.walk(dir_path):
-                    for filename in filenames:
-                        file_path = os.path.join(root, filename)
-                        rel_path = os.path.relpath(file_path, dir_path)
-
-                        with open(file_path, "rb") as f:
-                            file_content = f.read()
-
-                        # Add the file to the multipart request
-                        files.append(
-                            (
-                                "file",
-                                (rel_path, file_content, "application/octet-stream"),
-                            )
-                        )
-
-                # Create a request with the directory flag
-                upload_url = f"{self.base_url}/api/v0/add?recursive=true&wrap-with-directory=true"
-
-                print(
-                    f"  Sending directory upload request to {self.base_url}/api/v0/add... (attempt {retries+1}/{max_retries})"
-                )
-                print(f"  Uploading {len(files)} files...")
-
-                # Make HTTP POST request with timeout
-                response = requests.post(
-                    upload_url,
-                    files=files,
-                    timeout=300,  # 5 minute timeout for directory uploads
-                )
-                response.raise_for_status()
-
-                # The IPFS API returns a JSON object for each file, one per line
-                # The last one should be the directory itself
-                lines = response.text.strip().split("\n")
-                if not lines:
-                    raise ConnectionError("Empty response from IPFS API")
-
-                last_item = json.loads(lines[-1])
-                print(f"  Directory upload successful! CID: {last_item['Hash']}")
-                return last_item["Hash"]
-
-            except (
-                requests.exceptions.Timeout,
-                requests.exceptions.ConnectionError,
-                requests.exceptions.RequestException,
-            ) as e:
-                # Save the error and retry
-                last_error = e
-                retries += 1
-                wait_time = 2**retries  # Exponential backoff: 2, 4, 8 seconds
-                print(f"  Upload attempt {retries} failed: {str(e)}")
-                if retries < max_retries:
-                    print(f"  Retrying in {wait_time} seconds...")
-                    time.sleep(wait_time)
-            except Exception as e:
-                # For other exceptions, don't retry
-                raise ConnectionError(
-                    f"Failed to upload directory via HTTP API: {str(e)}"
-                )
-
-        # If we've exhausted all retries
-        if last_error:
-            error_type = type(last_error).__name__
-            if isinstance(last_error, requests.exceptions.Timeout):
-                raise ConnectionError(
-                    f"Timeout when uploading directory to {self.base_url} after {max_retries} attempts. The server is not responding."
-                )
-            elif isinstance(last_error, requests.exceptions.ConnectionError):
-                raise ConnectionError(
-                    f"Failed to connect to IPFS node at {self.base_url} after {max_retries} attempts: {str(last_error)}"
-                )
-            else:
-                raise ConnectionError(
-                    f"Failed to upload directory via HTTP API after {max_retries} attempts. Last error ({error_type}): {str(last_error)}"
-                )
-
-        # This should never happen, but just in case
-        raise ConnectionError(
-            f"Failed to upload directory to {self.base_url} after {max_retries} attempts for unknown reasons."
-        )
-
    def format_size(self, size_bytes: int) -> str:
        """
        Format a size in bytes to a human-readable string.
@@ -662,14 +406,7 @@ class IPFSClient:
        Returns:
            str: Human-readable size string (e.g., '1.23 MB', '456.78 KB')
        """
-        if size_bytes >= 1024 * 1024 * 1024:
-            return f"{size_bytes / (1024 * 1024 * 1024):.2f} GB"
-        elif size_bytes >= 1024 * 1024:
-            return f"{size_bytes / (1024 * 1024):.2f} MB"
-        elif size_bytes >= 1024:
-            return f"{size_bytes / 1024:.2f} KB"
-        else:
-            return f"{size_bytes} bytes"
+        return format_size(size_bytes)
 
    def format_cid(self, cid: str) -> str:
        """
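`format_size` is now a thin wrapper over `hippius_sdk.utils.format_size`. Judging from the removed body, the relocated helper is equivalent to:

def format_size(size_bytes: int) -> str:
    """Format a byte count as a human-readable string."""
    if size_bytes >= 1024 * 1024 * 1024:
        return f"{size_bytes / (1024 * 1024 * 1024):.2f} GB"
    elif size_bytes >= 1024 * 1024:
        return f"{size_bytes / (1024 * 1024):.2f} MB"
    elif size_bytes >= 1024:
        return f"{size_bytes / 1024:.2f} KB"
    else:
        return f"{size_bytes} bytes"

assert format_size(1536) == "1.50 KB"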
@@ -683,54 +420,9 @@ class IPFSClient:
        Returns:
            str: Formatted CID string
        """
-
-        if cid.startswith(("Qm", "bafy", "bafk", "bafyb", "bafzb", "b")):
-            return cid
+        return format_cid(cid)
 
-
-        if all(c in "0123456789abcdefABCDEF" for c in cid):
-            # First try the special case where the hex string is actually ASCII encoded
-            try:
-                # Try to decode the hex as ASCII characters
-                hex_bytes = bytes.fromhex(cid)
-                ascii_str = hex_bytes.decode("ascii")
-
-                # If the decoded string starts with a valid CID prefix, return it
-                if ascii_str.startswith(("Qm", "bafy", "bafk", "bafyb", "bafzb", "b")):
-                    return ascii_str
-            except Exception:
-                pass
-
-            # If the above doesn't work, try the standard CID decoding
-            try:
-                import binascii
-
-                import base58
-
-                # Try to decode hex to binary then to base58 for CIDv0
-                try:
-                    binary_data = binascii.unhexlify(cid)
-                    if (
-                        len(binary_data) > 2
-                        and binary_data[0] == 0x12
-                        and binary_data[1] == 0x20
-                    ):
-                        # This looks like a CIDv0 (Qm...)
-                        decoded_cid = base58.b58encode(binary_data).decode("utf-8")
-                        return decoded_cid
-                except Exception:
-                    pass
-
-                # If not successful, just return hex with 0x prefix as fallback
-                return f"0x{cid}"
-            except ImportError:
-                # If base58 is not available, return hex with prefix
-                return f"0x{cid}"
-
-        # Default case - return as is
-        return cid
-
-    def download_file(
+    async def download_file(
        self,
        cid: str,
        output_path: str,
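`format_cid` likewise delegates to `hippius_sdk.utils.format_cid`. The removed body documents the interesting case it handled: a hex string (for example, a CID read back from on-chain storage) that is really ASCII-encoded. That decoding step, condensed:

def decode_hex_cid(cid: str) -> str:
    """Decode a hex-encoded ASCII CID, returning the input when not applicable."""
    if all(c in "0123456789abcdefABCDEF" for c in cid):
        try:
            ascii_str = bytes.fromhex(cid).decode("ascii")
            if ascii_str.startswith(("Qm", "bafy", "bafk", "bafyb", "bafzb", "b")):
                return ascii_str
        except ValueError:
            pass
    return cid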
@@ -858,7 +550,7 @@ class IPFSClient:
            if temp_file_path and os.path.exists(temp_file_path):
                os.unlink(temp_file_path)
 
-    def cat(
+    async def cat(
        self,
        cid: str,
        max_display_bytes: int = 1024,
@@ -898,14 +590,7 @@ class IPFSClient:
                "Decryption requested but not available. Check that PyNaCl is installed and a valid encryption key is provided."
            )
 
-
-        if self.client:
-            content = self.client.cat(cid)
-        else:
-            url = f"{self.gateway}/ipfs/{cid}"
-            response = requests.get(url)
-            response.raise_for_status()
-            content = response.content
+        content = await self.client.cat(cid)
 
        # Decrypt if needed
        if should_decrypt:
@@ -940,7 +625,7 @@ class IPFSClient:
 
        return result
 
-    def exists(self, cid: str) -> Dict[str, Any]:
+    async def exists(self, cid: str) -> Dict[str, Any]:
        """
        Check if a CID exists on IPFS.
 
@@ -956,19 +641,7 @@ class IPFSClient:
        """
        formatted_cid = self.format_cid(cid)
        gateway_url = f"{self.gateway}/ipfs/{cid}"
-
-        try:
-            if self.client:
-                # We'll try to get the file stats
-                self.client.ls(cid)
-                exists = True
-            else:
-                # Try to access through gateway
-                url = f"{self.gateway}/ipfs/{cid}"
-                response = requests.head(url)
-                exists = response.status_code == 200
-        except (ipfshttpclient.exceptions.ErrorResponse, requests.RequestException):
-            exists = False
+        exists = await self.client.ls(cid)
 
        return {
            "exists": exists,
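`exists` now answers with a single `ls` call instead of the client/gateway branching, but the result dictionary keeps its old shape. A usage sketch, assuming default client configuration and a hypothetical CID:

import asyncio

from hippius_sdk.ipfs import IPFSClient

async def check(cid: str) -> None:
    client = IPFSClient()
    info = await client.exists(cid)
    if info["exists"]:
        print(f"Available at {info['gateway_url']}")

asyncio.run(check("QmExampleCid"))  # hypothetical CID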
@@ -977,7 +650,7 @@ class IPFSClient:
            "gateway_url": gateway_url if exists else None,
        }
 
-    def pin(self, cid: str) -> Dict[str, Any]:
+    async def pin(self, cid: str) -> Dict[str, Any]:
        """
        Pin a CID to IPFS to keep it available.
 
@@ -996,31 +669,15 @@ class IPFSClient:
        """
        formatted_cid = self.format_cid(cid)
 
-        if not self.client and self.base_url:
-            # Try using HTTP API for pinning
-            try:
-                url = f"{self.base_url}/api/v0/pin/add?arg={cid}"
-                response = requests.post(url)
-                response.raise_for_status()
-                success = True
-                message = "Successfully pinned via HTTP API"
-            except requests.RequestException as e:
-                success = False
-                message = f"Failed to pin: {str(e)}"
-        elif not self.client:
-            raise ConnectionError(
-                "No IPFS connection available. Please provide a valid api_url or ensure a local IPFS daemon is running."
-            )
-
        try:
            if self.client:
-                self.client.pin
+                await self.client.pin(cid)
                success = True
                message = "Successfully pinned"
            else:
                success = False
                message = "No IPFS client available"
-        except
+        except httpx.HTTPError as e:
            success = False
            message = f"Failed to pin: {str(e)}"
 
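`pin` keeps its report-don't-raise contract, catching `httpx.HTTPError` and returning a dictionary that includes a `message` field. A usage sketch, again with a hypothetical CID:

import asyncio

from hippius_sdk.ipfs import IPFSClient

async def pin_cid(cid: str) -> None:
    client = IPFSClient()
    result = await client.pin(cid)
    print(result["message"])  # e.g. "Successfully pinned"

asyncio.run(pin_cid("QmExampleCid"))  # hypothetical CID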
@@ -1031,7 +688,7 @@ class IPFSClient:
            "message": message,
        }
 
-    def erasure_code_file(
+    async def erasure_code_file(
        self,
        file_path: str,
        k: int = 3,
@@ -1092,7 +749,7 @@ class IPFSClient:
        file_id = str(uuid.uuid4())
 
        if verbose:
-            print(f"Processing file: {file_name} ({file_size/1024/1024:.2f} MB)")
+            print(f"Processing file: {file_name} ({file_size / 1024 / 1024:.2f} MB)")
            print(
                f"Erasure coding parameters: k={k}, m={m} (need {k}/{m} chunks to reconstruct)"
            )
@@ -1226,7 +883,7 @@ class IPFSClient:
                    all_encoded_chunks.append(encoded_chunks)
 
                    if verbose and (i + 1) % 10 == 0:
-                        print(f"  Encoded {i+1}/{len(chunks)} chunks")
+                        print(f"  Encoded {i + 1}/{len(chunks)} chunks")
                except Exception as e:
                    # If encoding fails, provide more helpful error message
                    error_msg = f"Error encoding chunk {i}: {str(e)}"
@@ -1258,7 +915,7 @@ class IPFSClient:
 
                # Upload the chunk to IPFS
                try:
-                    chunk_cid = self.upload_file(
+                    chunk_cid = await self.upload_file(
                        chunk_path, max_retries=max_retries
                    )
 
@@ -1289,10 +946,10 @@ class IPFSClient:
            json.dump(metadata, f, indent=2)
 
        if verbose:
-            print(
+            print("Uploading metadata file...")
 
        # Upload the metadata file to IPFS
-        metadata_cid_result = self.upload_file(
+        metadata_cid_result = await self.upload_file(
            metadata_path, max_retries=max_retries
        )
 
@@ -1301,15 +958,15 @@ class IPFSClient:
        metadata["metadata_cid"] = metadata_cid
 
        if verbose:
-            print(
+            print("Erasure coding complete!")
            print(f"Metadata CID: {metadata_cid}")
-            print(f"Original file size: {file_size/1024/1024:.2f} MB")
+            print(f"Original file size: {file_size / 1024 / 1024:.2f} MB")
            print(f"Total chunks: {len(chunks) * m}")
            print(f"Minimum chunks needed: {k * len(chunks)}")
 
        return metadata
 
-    def reconstruct_from_erasure_code(
+    async def reconstruct_from_erasure_code(
        self,
        metadata_cid: str,
        output_file: str,
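The chunk totals printed above follow directly from the coding parameters: every original chunk is expanded into m shares, any k of which reconstruct it. A worked example with k=3 (the default) and m=5 chosen for illustration, for a file split into 4 chunks:

k, m = 3, 5          # any 3 of every 5 shares rebuild a chunk
num_chunks = 4       # the original file split into 4 chunks

total_shares = num_chunks * m     # len(chunks) * m  -> 20 shares uploaded
minimum_needed = k * num_chunks   # k * len(chunks)  -> 12 shares required

print(total_shares, minimum_needed)  # 20 12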
@@ -1355,7 +1012,9 @@ class IPFSClient:
                print(f"Downloading metadata file (CID: {metadata_cid})...")
 
            metadata_path = os.path.join(temp_dir, "metadata.json")
-            self.download_file(
+            await self.download_file(
+                metadata_cid, metadata_path, max_retries=max_retries
+            )
 
            if verbose:
                metadata_download_time = time.time() - start_time
@@ -1376,13 +1035,13 @@ class IPFSClient:
 
            if verbose:
                print(
-                    f"File: {original_file['name']} ({original_file['size']/1024/1024:.2f} MB)"
+                    f"File: {original_file['name']} ({original_file['size'] / 1024 / 1024:.2f} MB)"
                )
                print(
                    f"Erasure coding parameters: k={k}, m={m} (need {k} of {m} chunks to reconstruct)"
                )
                if is_encrypted:
-                    print(
+                    print("Encrypted: Yes")
 
            # Step 3: Group chunks by their original chunk index
            chunks_by_original = {}
@@ -1429,7 +1088,7 @@ class IPFSClient:
                            if isinstance(chunk["cid"], dict) and "cid" in chunk["cid"]
                            else chunk["cid"]
                        )
-                        self.download_file(
+                        await self.download_file(
                            chunk_cid, chunk_path, max_retries=max_retries
                        )
                        chunks_downloaded += 1
@@ -1487,7 +1146,7 @@ class IPFSClient:
 
            # Step 5: Combine the reconstructed chunks into a file
            if verbose:
-                print(
+                print("Combining reconstructed chunks...")
 
            # Concatenate all chunks
            file_data = b"".join(reconstructed_chunks)
@@ -1505,7 +1164,7 @@ class IPFSClient:
                    )
 
                if verbose:
-                    print(
+                    print("Decrypting file data...")
 
                file_data = self.decrypt_data(file_data)
 
@@ -1519,11 +1178,11 @@ class IPFSClient:
                expected_hash = original_file["hash"]
 
                if actual_hash != expected_hash:
-                    print(
+                    print("Warning: File hash mismatch!")
                    print(f"  Expected: {expected_hash}")
                    print(f"  Actual: {actual_hash}")
                elif verbose:
-                    print(
+                    print("Hash verification successful!")
 
            total_time = time.time() - start_time
            if verbose:
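The verification step recomputes a digest of the reconstructed bytes and compares it with the `hash` field stored in the metadata. A sketch of that comparison, assuming SHA-256 (the module imports `hashlib`, but the exact algorithm is not visible in these hunks):

import hashlib

def verify(file_data: bytes, expected_hash: str) -> bool:
    """Compare reconstructed bytes against the hash recorded in metadata."""
    actual_hash = hashlib.sha256(file_data).hexdigest()  # algorithm assumed
    return actual_hash == expected_hash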
@@ -1537,7 +1196,7 @@ class IPFSClient:
            if temp_dir_obj is not None:
                temp_dir_obj.cleanup()
 
-    def store_erasure_coded_file(
+    async def store_erasure_coded_file(
        self,
        file_path: str,
        k: int = 3,
@@ -1573,7 +1232,7 @@ class IPFSClient:
            RuntimeError: If processing fails
        """
        # Step 1: Erasure code the file and upload chunks
-        metadata = self.erasure_code_file(
+        metadata = await self.erasure_code_file(
            file_path=file_path,
            k=k,
            m=m,
@@ -1583,14 +1242,9 @@ class IPFSClient:
            verbose=verbose,
        )
 
-        # Step 2:
+        # Step 2: Create substrate client if we need it
        if substrate_client is None:
-            from hippius_sdk.substrate import FileInput, SubstrateClient
-
            substrate_client = SubstrateClient()
-        else:
-            # Just get the FileInput class
-            from hippius_sdk.substrate import FileInput
 
        original_file = metadata["original_file"]
        metadata_cid = metadata["metadata_cid"]
@@ -1612,7 +1266,7 @@ class IPFSClient:
 
        # Step 4: Add all chunks to the storage request
        if verbose:
-            print(
+            print("Adding all chunks to storage request...")
 
        for i, chunk in enumerate(metadata["chunks"]):
            # Extract the CID string from the chunk's cid dictionary
@@ -1637,12 +1291,12 @@ class IPFSClient:
                f"Submitting storage request for 1 metadata file and {len(metadata['chunks'])} chunks..."
            )
 
-        tx_hash = substrate_client.storage_request(
+        tx_hash = await substrate_client.storage_request(
            files=all_file_inputs, miner_ids=miner_ids
        )
 
        if verbose:
-            print(
+            print("Successfully stored all files in marketplace!")
            print(f"Transaction hash: {tx_hash}")
            print(f"Metadata CID: {metadata_cid}")
            print(
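End to end, `store_erasure_coded_file` now awaits twice: once for the erasure-coded upload and once for the substrate `storage_request`. A usage sketch, with `m=5` and the shape of the return value as assumptions beyond what these hunks show:

import asyncio

from hippius_sdk.ipfs import IPFSClient

async def store(path: str) -> None:
    client = IPFSClient()
    result = await client.store_erasure_coded_file(path, k=3, m=5, verbose=True)
    print(result)  # expected to carry the metadata CID and transaction details

asyncio.run(store("example.bin"))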