hippius 0.2.9__py3-none-any.whl → 0.2.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {hippius-0.2.9.dist-info → hippius-0.2.11.dist-info}/METADATA +1 -1
- hippius-0.2.11.dist-info/RECORD +17 -0
- hippius_sdk/__init__.py +1 -1
- hippius_sdk/cli_handlers.py +77 -51
- hippius_sdk/client.py +63 -15
- hippius_sdk/config.py +14 -2
- hippius_sdk/ipfs.py +159 -114
- hippius_sdk/ipfs_core.py +1 -1
- hippius_sdk/substrate.py +123 -79
- hippius_sdk/utils.py +14 -8
- hippius-0.2.9.dist-info/RECORD +0 -17
- {hippius-0.2.9.dist-info → hippius-0.2.11.dist-info}/WHEEL +0 -0
- {hippius-0.2.9.dist-info → hippius-0.2.11.dist-info}/entry_points.txt +0 -0
hippius_sdk/ipfs.py
CHANGED
```diff
@@ -13,16 +13,8 @@ import uuid
 from typing import Any, Callable, Dict, List, Optional
 
 import httpx
-import requests
 
 from hippius_sdk.config import get_config_value, get_encryption_key
-from hippius_sdk.errors import (
-    HippiusAlreadyDeletedError,
-    HippiusFailedIPFSUnpin,
-    HippiusFailedSubstrateDelete,
-    HippiusIPFSConnectionError,
-    HippiusMetadataError,
-)
 from hippius_sdk.ipfs_core import AsyncIPFSClient
 from hippius_sdk.substrate import FileInput, SubstrateClient
 from hippius_sdk.utils import format_cid, format_size
@@ -203,6 +195,7 @@ class IPFSClient:
         include_formatted_size: bool = True,
         encrypt: Optional[bool] = None,
         max_retries: int = 3,
+        seed_phrase: Optional[str] = None,
     ) -> Dict[str, Any]:
         """
         Upload a file to IPFS with optional encryption.
@@ -212,6 +205,7 @@ class IPFSClient:
             include_formatted_size: Whether to include formatted size in the result (default: True)
             encrypt: Whether to encrypt the file (overrides default)
             max_retries: Maximum number of retry attempts (default: 3)
+            seed_phrase: Optional seed phrase to use for blockchain interactions (uses config if None)
 
         Returns:
             Dict[str, Any]: Dictionary containing:
```
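For orientation, a minimal sketch of how the new `seed_phrase` parameter is used on the upload path. The constructor arguments and the shape of the printed result are assumptions, not taken from this diff:

```python
import asyncio

from hippius_sdk.ipfs import IPFSClient


async def main() -> None:
    # Assumed: IPFSClient() picks up gateway/API settings from the local config.
    client = IPFSClient()

    # seed_phrase is optional everywhere it was added in 0.2.11; passing None
    # (or omitting it) falls back to the seed phrase stored in the config.
    result = await client.upload_file(
        "example.txt",
        encrypt=False,
        seed_phrase=None,  # or an explicit mnemonic string to override the config
    )
    print(result)


asyncio.run(main())
```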
```diff
@@ -291,6 +285,7 @@ class IPFSClient:
         dir_path: str,
         include_formatted_size: bool = True,
         encrypt: Optional[bool] = None,
+        seed_phrase: Optional[str] = None,
     ) -> Dict[str, Any]:
         """
         Upload a directory to IPFS with optional encryption of files.
@@ -299,6 +294,7 @@ class IPFSClient:
             dir_path: Path to the directory to upload
             include_formatted_size: Whether to include formatted size in the result (default: True)
             encrypt: Whether to encrypt files (overrides default)
+            seed_phrase: Optional seed phrase to use for blockchain interactions (uses config if None)
 
         Returns:
             Dict[str, Any]: Dictionary containing:
```
```diff
@@ -441,20 +437,21 @@ class IPFSClient:
         self,
         cid: str,
         output_path: str,
-        decrypt: Optional[bool] = None,
+        _: Optional[bool] = None,
         max_retries: int = 3,
+        seed_phrase: Optional[str] = None,
         skip_directory_check: bool = False,
     ) -> Dict[str, Any]:
         """
         Download a file from IPFS with optional decryption.
-        Supports downloading directories - in that case, a directory structure will be created.
 
         Args:
             cid: Content Identifier (CID) of the file to download
-            output_path: Path where the downloaded file
-            decrypt: Whether to decrypt the file (overrides default)
+            output_path: Path where the downloaded file will be saved
+            _: Whether to decrypt the file (overrides default)
             max_retries: Maximum number of retry attempts (default: 3)
-
+            seed_phrase: Optional seed phrase to use for blockchain interactions (uses config if None)
+            skip_directory_check: Whether to skip checking if the CID is a directory (default: False)
 
         Returns:
             Dict[str, Any]: Dictionary containing download results:
@@ -464,125 +461,94 @@ class IPFSClient:
                 - size_formatted: Human-readable file size
                 - elapsed_seconds: Time taken for the download in seconds
                 - decrypted: Whether the file was decrypted
-                - is_directory: Whether the download was a directory
 
         Raises:
             requests.RequestException: If the download fails
             ValueError: If decryption is requested but fails
         """
         start_time = time.time()
-
-        # Skip directory check if requested (important for erasure code chunks)
         is_directory = False
+
+        # Check if this is a directory (unless skip_directory_check is True)
         if not skip_directory_check:
-            # Use the improved ls function to properly detect directories
             try:
-                # The ls function now properly detects directories
                 ls_result = await self.client.ls(cid)
-
+                if isinstance(ls_result, dict) and ls_result.get("Objects", []):
+                    # Check if we have Links in the object, which means it's a directory
+                    for obj in ls_result["Objects"]:
+                        if obj.get("Links", []):
+                            is_directory = True
+                            break
             except Exception:
-                # If ls fails,
+                # If ls check fails, continue treating as a regular file
                 pass
 
-        #
+        # Handle based on whether it's a directory or file
        if is_directory:
-            # For directories, we don't need to decrypt each file during the initial download
-            # We'll use the AsyncIPFSClient's download_directory method directly
            try:
-
+                # Use the AsyncIPFSClient's directory handling method
+                os.makedirs(
+                    os.path.dirname(os.path.abspath(output_path)), exist_ok=True
+                )
+                download_result = await self.client.get(cid, output_path)
+                downloaded_size = 0
 
-                #
-                total_size = 0
+                # Walk through the downloaded directory to calculate total size
                 for root, _, files in os.walk(output_path):
                     for file in files:
                         file_path = os.path.join(root, file)
-
-
-                elapsed_time = time.time() - start_time
+                        downloaded_size += os.path.getsize(file_path)
 
+                # Return success
                 return {
                     "success": True,
                     "output_path": output_path,
-                    "size_bytes":
-                    "size_formatted": self.format_size(
-                    "elapsed_seconds":
-                    "decrypted": False,
+                    "size_bytes": downloaded_size,
+                    "size_formatted": self.format_size(downloaded_size),
+                    "elapsed_seconds": time.time() - start_time,
+                    "decrypted": False,
                     "is_directory": True,
                 }
             except Exception as e:
                 raise RuntimeError(f"Failed to download directory: {str(e)}")
-
-
-
-
-
-        # Check if decryption is available if requested
-        if should_decrypt and not self.encryption_available:
-            raise ValueError(
-                "Decryption requested but not available. Check that PyNaCl is installed and a valid encryption key is provided."
-            )
-
-        # Create a temporary file if we'll be decrypting
-        temp_file_path = None
-        try:
-            if should_decrypt:
-                # Create a temporary file for the encrypted data
-                temp_file = tempfile.NamedTemporaryFile(delete=False)
-                temp_file_path = temp_file.name
-                temp_file.close()
-                download_path = temp_file_path
-            else:
-                download_path = output_path
-
-            # Pass the skip_directory_check parameter to the core client
-            await self.client.download_file(
-                cid, download_path, skip_directory_check=skip_directory_check
-            )
-            download_success = True
-
-            if not download_success:
-                raise RuntimeError("Failed to download file after multiple attempts")
-
-            # Decrypt if needed
-            if should_decrypt:
+        else:
+            # Regular file download
+            retries = 0
+            while retries < max_retries:
                 try:
-
-                    with
-
+                    url = f"{self.gateway}/ipfs/{cid}"
+                    async with self.client.client.stream(
+                        url=url, method="GET"
+                    ) as response:
+                        response.raise_for_status()
+
+                        with open(output_path, "wb") as f:
+                            async for chunk in response.aiter_bytes(chunk_size=8192):
+                                f.write(chunk)
+                    break
+
+                except (httpx.HTTPError, IOError) as e:
+                    retries += 1
+
+                    if retries < max_retries:
+                        wait_time = 2**retries
+                        print(f"Download attempt {retries} failed: {str(e)}")
+                        print(f"Retrying in {wait_time} seconds...")
+                        time.sleep(wait_time)
+                    else:
+                        raise
+
+            file_size_bytes = os.path.getsize(output_path)
+            elapsed_time = time.time() - start_time
 
-
-
-
-
-
-
-
-
-                        f.write(decrypted_data)
-
-                    # Use output_path for size measurement
-                    file_size_bytes = len(decrypted_data)
-                except Exception as e:
-                    raise ValueError(f"Failed to decrypt file: {str(e)}")
-            else:
-                file_size_bytes = os.path.getsize(output_path)
-
-            elapsed_time = time.time() - start_time
-
-            return {
-                "success": True,
-                "output_path": output_path,
-                "size_bytes": file_size_bytes,
-                "size_formatted": self.format_size(file_size_bytes),
-                "elapsed_seconds": round(elapsed_time, 2),
-                "decrypted": should_decrypt,
-                "is_directory": False,
-            }
-
-        finally:
-            # Clean up temporary file if created
-            if temp_file_path and os.path.exists(temp_file_path):
-                os.unlink(temp_file_path)
+            return {
+                "success": True,
+                "output_path": output_path,
+                "size_bytes": file_size_bytes,
+                "size_formatted": self.format_size(file_size_bytes),
+                "elapsed_seconds": round(elapsed_time, 2),
+                "decrypted": _,
+            }
 
     async def cat(
         self,
```
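The rewritten non-directory path streams the file from the gateway and retries with exponential backoff (2**retries seconds). A self-contained sketch of the same pattern with plain `httpx`; the gateway URL, CID, and output path are hypothetical:

```python
import asyncio
import os

import httpx


async def download_via_gateway(
    gateway: str, cid: str, output_path: str, max_retries: int = 3
) -> int:
    """Stream a CID from an IPFS gateway to disk, retrying with exponential backoff."""
    retries = 0
    async with httpx.AsyncClient(timeout=300, follow_redirects=True) as client:
        while retries < max_retries:
            try:
                async with client.stream("GET", f"{gateway}/ipfs/{cid}") as response:
                    response.raise_for_status()
                    with open(output_path, "wb") as f:
                        async for chunk in response.aiter_bytes(chunk_size=8192):
                            f.write(chunk)
                break
            except (httpx.HTTPError, IOError):
                retries += 1
                if retries >= max_retries:
                    raise
                # The diff calls time.sleep() here; asyncio.sleep() avoids blocking the event loop.
                await asyncio.sleep(2**retries)
    return os.path.getsize(output_path)


# Example (hypothetical gateway and CID):
# asyncio.run(download_via_gateway("https://ipfs.io", "QmExampleCid", "out.bin"))
```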
```diff
@@ -590,6 +556,7 @@ class IPFSClient:
         max_display_bytes: int = 1024,
         format_output: bool = True,
         decrypt: Optional[bool] = None,
+        seed_phrase: Optional[str] = None,
     ) -> Dict[str, Any]:
         """
         Get the content of a file from IPFS with optional decryption.
@@ -599,6 +566,7 @@ class IPFSClient:
             max_display_bytes: Maximum number of bytes to include in the preview (default: 1024)
             format_output: Whether to attempt to decode the content as text (default: True)
             decrypt: Whether to decrypt the file (overrides default)
+            seed_phrase: Optional seed phrase to use for blockchain interactions (uses config if None)
 
         Returns:
             Dict[str, Any]: Dictionary containing:
@@ -659,12 +627,15 @@ class IPFSClient:
 
         return result
 
-    async def exists(self, cid: str) -> Dict[str, Any]:
+    async def exists(
+        self, cid: str, seed_phrase: Optional[str] = None
+    ) -> Dict[str, Any]:
         """
         Check if a CID exists on IPFS.
 
         Args:
             cid: Content Identifier (CID) to check
+            seed_phrase: Optional seed phrase to use for blockchain interactions (uses config if None)
 
         Returns:
             Dict[str, Any]: Dictionary containing:
@@ -684,7 +655,9 @@ class IPFSClient:
             "gateway_url": gateway_url if exists else None,
         }
 
-    async def publish_global(self, cid: str) -> Dict[str, Any]:
+    async def publish_global(
+        self, cid: str, seed_phrase: Optional[str] = None
+    ) -> Dict[str, Any]:
         """
         Publish a CID to the global IPFS network, ensuring it's widely available.
 
@@ -693,6 +666,7 @@ class IPFSClient:
 
         Args:
             cid: Content Identifier (CID) to publish globally
+            seed_phrase: Optional seed phrase to use for blockchain interactions (uses config if None)
 
         Returns:
             Dict[str, Any]: Dictionary containing:
@@ -702,7 +676,7 @@ class IPFSClient:
                 - message: Status message
         """
         # First ensure it's pinned locally
-        pin_result = await self.pin(cid)
+        pin_result = await self.pin(cid, seed_phrase=seed_phrase)
 
         if not pin_result.get("success", False):
             return {
@@ -722,12 +696,13 @@ class IPFSClient:
             "message": "Content published to global IPFS network",
         }
 
-    async def pin(self, cid: str) -> Dict[str, Any]:
+    async def pin(self, cid: str, seed_phrase: Optional[str] = None) -> Dict[str, Any]:
         """
         Pin a CID to IPFS to keep it available.
 
         Args:
             cid: Content Identifier (CID) to pin
+            seed_phrase: Optional seed phrase to use for blockchain interactions (uses config if None)
 
         Returns:
             Dict[str, Any]: Dictionary containing:
```
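A short sketch of how the extended `pin` and `publish_global` signatures compose; the `success` and `message` keys are the ones used in this diff, while client construction and the CID are assumptions:

```python
import asyncio
from typing import Optional

from hippius_sdk.ipfs import IPFSClient


async def pin_and_publish(cid: str, seed_phrase: Optional[str] = None) -> None:
    client = IPFSClient()  # assumed default construction

    pin_result = await client.pin(cid, seed_phrase=seed_phrase)
    if not pin_result.get("success", False):
        raise RuntimeError(f"Pin failed for {cid}")

    # publish_global pins first internally, so calling it alone would also work.
    publish_result = await client.publish_global(cid, seed_phrase=seed_phrase)
    print(publish_result.get("message"))


# asyncio.run(pin_and_publish("QmExampleCid"))  # hypothetical CID
```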
```diff
@@ -770,6 +745,7 @@ class IPFSClient:
         max_retries: int = 3,
         verbose: bool = True,
         progress_callback: Optional[Callable[[str, int, int], None]] = None,
+        seed_phrase: Optional[str] = None,
     ) -> Dict[str, Any]:
         """
         Split a file using erasure coding, then upload the chunks to IPFS.
@@ -789,6 +765,7 @@ class IPFSClient:
             verbose: Whether to print progress information
             progress_callback: Optional callback function for progress updates
                 Function receives (stage_name, current, total)
+            seed_phrase: Optional seed phrase to use for blockchain interactions (uses config if None)
 
         Returns:
             dict: Metadata including the original file info and chunk information
```
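The docstring above fixes the callback contract: the function receives `(stage_name, current, total)`. A minimal callback written against that contract; the call to `erasure_code_file` and its file path are illustrative assumptions:

```python
import asyncio

from hippius_sdk.ipfs import IPFSClient


def report_progress(stage_name: str, current: int, total: int) -> None:
    """Matches the documented (stage_name, current, total) callback signature."""
    percent = (100 * current // total) if total else 0
    print(f"[{stage_name}] {current}/{total} ({percent}%)")


async def main() -> None:
    client = IPFSClient()  # assumed default construction
    metadata = await client.erasure_code_file(
        "large_input.bin",  # hypothetical path
        progress_callback=report_progress,
        verbose=False,
        seed_phrase=None,  # falls back to the configured seed phrase
    )
    print(metadata.get("original_file"))


asyncio.run(main())
```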
```diff
@@ -1100,6 +1077,7 @@ class IPFSClient:
         temp_dir: str = None,
         max_retries: int = 3,
         verbose: bool = True,
+        seed_phrase: Optional[str] = None,
     ) -> Dict:
         """
         Reconstruct a file from erasure-coded chunks using its metadata.
@@ -1110,6 +1088,7 @@ class IPFSClient:
             temp_dir: Directory to use for temporary files (default: system temp)
             max_retries: Maximum number of retry attempts for IPFS downloads
             verbose: Whether to print progress information
+            seed_phrase: Optional seed phrase to use for blockchain interactions (uses config if None)
 
         Returns:
             Dict: containing file reconstruction info.
@@ -1140,7 +1119,10 @@ class IPFSClient:
 
         metadata_path = os.path.join(temp_dir, "metadata.json")
         await self.download_file(
-            metadata_cid,
+            metadata_cid,
+            metadata_path,
+            max_retries=max_retries,
+            seed_phrase=seed_phrase,
         )
 
         if verbose:
@@ -1238,6 +1220,7 @@ class IPFSClient:
                     path,
                     max_retries=max_retries,
                     skip_directory_check=True,
+                    seed_phrase=seed_phrase,
                 )
 
                 # Read chunk data
```
```diff
@@ -1332,20 +1315,48 @@ class IPFSClient:
             )
 
         # Wait for all chunks to be reconstructed
+        if verbose:
+            print(f"Waiting for {len(chunk_tasks)} chunk tasks to complete...")
+
+        # Track progress
+        start_chunks_time = time.time()
+
+        # Wait for all chunks to complete (preserves ordering)
         reconstructed_chunks = await asyncio.gather(*chunk_tasks)
 
+        if verbose:
+            print(
+                f"All chunks downloaded and decoded successfully in {time.time() - start_chunks_time:.2f} seconds"
+            )
+
         if verbose:
             download_time = time.time() - start_time
             print(f"Chunk reconstruction completed in {download_time:.2f} seconds")
+            print(
+                f"Received {len(reconstructed_chunks)} of {len(chunk_tasks)} expected chunks"
+            )
 
         # Step 5: Combine the reconstructed chunks into a file
         print("Combining reconstructed chunks...")
 
+        if verbose:
+            print(f"Processing {len(reconstructed_chunks)} reconstructed chunks...")
+
         # Process chunks to remove padding correctly
         processed_chunks = []
         size_processed = 0
 
+        # Guard against empty chunks
+        if not reconstructed_chunks:
+            raise ValueError("No chunks were successfully reconstructed")
+
+        # Track progress for large files
+        chunk_process_start = time.time()
+
         for i, chunk in enumerate(reconstructed_chunks):
+            if verbose and i % 10 == 0:
+                print(f"Processing chunk {i+1}/{len(reconstructed_chunks)}...")
+
             # For all chunks except the last one, use full chunk size
             if i < len(reconstructed_chunks) - 1:
                 # Calculate how much of this chunk should be used (handle full chunks)
```
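The added progress reporting sits around an unchanged `asyncio.gather(*chunk_tasks)` call, and the "(preserves ordering)" comment relies on `gather` returning results in submission order even when tasks finish out of order. A tiny standalone illustration of that property:

```python
import asyncio
import random


async def fetch_chunk(index: int) -> bytes:
    # Simulate chunks finishing out of order.
    await asyncio.sleep(random.random() / 10)
    return f"chunk-{index}".encode()


async def main() -> None:
    tasks = [fetch_chunk(i) for i in range(5)]
    chunks = await asyncio.gather(*tasks)
    # Results come back in submission order, so concatenation is deterministic.
    assert chunks == [f"chunk-{i}".encode() for i in range(5)]
    print(b"|".join(chunks))


asyncio.run(main())
```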
```diff
@@ -1360,8 +1371,19 @@ class IPFSClient:
                 processed_chunks.append(chunk[:remaining_bytes])
                 size_processed += remaining_bytes
 
+        if verbose:
+            print(
+                f"Chunk processing completed in {time.time() - chunk_process_start:.2f} seconds"
+            )
+            print(f"Concatenating {len(processed_chunks)} processed chunks...")
+
         # Concatenate all processed chunks
+        concat_start = time.time()
         file_data = b"".join(processed_chunks)
+        if verbose:
+            print(
+                f"Concatenation completed in {time.time() - concat_start:.2f} seconds"
+            )
 
         # Double-check the final size matches the original
         if len(file_data) != original_file["size"]:
```
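The padding-removal loop trims every decoded chunk to the bytes still owed to the original file before joining. A compact sketch of that bookkeeping with hypothetical names (`chunk_size`, `original_size`) in place of the SDK's metadata fields:

```python
from typing import List


def strip_padding(chunks: List[bytes], chunk_size: int, original_size: int) -> bytes:
    """Trim erasure-coded chunks back to the original file length and join them."""
    processed: List[bytes] = []
    size_processed = 0
    for i, chunk in enumerate(chunks):
        if i < len(chunks) - 1:
            usable = min(chunk_size, original_size - size_processed)
        else:
            usable = original_size - size_processed  # last chunk: only what remains
        processed.append(chunk[:usable])
        size_processed += usable
    return b"".join(processed)


# Example: three 4-byte chunks, but the original file was 10 bytes long.
data = strip_padding([b"AAAA", b"BBBB", b"CCxx"], chunk_size=4, original_size=10)
assert data == b"AAAABBBBCC"
```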
```diff
@@ -1392,11 +1414,19 @@ class IPFSClient:
             file_data = self.decrypt_data(file_data)
 
         # Step 7: Write to the output file
+        print(f"Writing {len(file_data)} bytes to {output_file}...")
+        write_start = time.time()
         with open(output_file, "wb") as f:
             f.write(file_data)
+        if verbose:
+            print(
+                f"File writing completed in {time.time() - write_start:.2f} seconds"
+            )
 
         # Step 8: Verify hash if available
         if "hash" in original_file:
+            print("Verifying file hash...")
+            hash_start = time.time()
             actual_hash = hashlib.sha256(file_data).hexdigest()
             expected_hash = original_file["hash"]
 
@@ -1405,7 +1435,9 @@ class IPFSClient:
                 print(f" Expected: {expected_hash}")
                 print(f" Actual: {actual_hash}")
             else:
-                print(
+                print(
+                    f"Hash verification successful in {time.time() - hash_start:.2f} seconds!"
+                )
 
         total_time = time.time() - start_time
         if verbose:
```
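Step 8 compares the SHA-256 of the reconstructed bytes against the digest recorded in the metadata; the check reduces to:

```python
import hashlib


def verify_sha256(file_data: bytes, expected_hash: str) -> bool:
    """Return True when the reconstructed bytes hash to the recorded digest."""
    actual_hash = hashlib.sha256(file_data).hexdigest()
    return actual_hash == expected_hash


assert verify_sha256(b"hello", hashlib.sha256(b"hello").hexdigest())
```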
```diff
@@ -1435,6 +1467,7 @@ class IPFSClient:
         verbose: bool = True,
         progress_callback: Optional[Callable[[str, int, int], None]] = None,
         publish: bool = True,
+        seed_phrase: Optional[str] = None,
     ) -> Dict[str, Any]:
         """
         Erasure code a file, upload the chunks to IPFS, and store in the Hippius marketplace.
@@ -1456,6 +1489,7 @@ class IPFSClient:
             publish: Whether to publish to the blockchain (True) or just perform local
                 erasure coding without publishing (False). When False, no password
                 is needed for seed phrase access.
+            seed_phrase: Optional seed phrase to use for blockchain interactions (uses config if None)
 
         Returns:
             dict: Result including metadata CID and transaction hash (if published)
@@ -1466,7 +1500,7 @@ class IPFSClient:
         """
         # Step 1: Create substrate client if we need it and are publishing
         if substrate_client is None and publish:
-            substrate_client = SubstrateClient()
+            substrate_client = SubstrateClient(password=None, account_name=None)
         # Step 2: Erasure code the file and upload chunks
         metadata = await self.erasure_code_file(
             file_path=file_path,
@@ -1477,6 +1511,7 @@ class IPFSClient:
             max_retries=max_retries,
             verbose=verbose,
             progress_callback=progress_callback,
+            seed_phrase=seed_phrase,
         )
 
         original_file = metadata["original_file"]
@@ -1531,7 +1566,7 @@ class IPFSClient:
                 )
 
             tx_hash = await substrate_client.storage_request(
-                files=all_file_inputs, miner_ids=miner_ids
+                files=all_file_inputs, miner_ids=miner_ids, seed_phrase=seed_phrase
             )
             if verbose:
                 print("Successfully stored all files in marketplace!")
@@ -1564,7 +1599,10 @@ class IPFSClient:
         return result
 
     async def delete_file(
-        self,
+        self,
+        cid: str,
+        cancel_from_blockchain: bool = True,
+        seed_phrase: Optional[str] = None,
     ) -> Dict[str, Any]:
         """
         Delete a file or directory from IPFS and optionally cancel its storage on the blockchain.
@@ -1573,6 +1611,7 @@ class IPFSClient:
         Args:
             cid: Content Identifier (CID) of the file/directory to delete
             cancel_from_blockchain: Whether to also cancel the storage request from the blockchain
+            seed_phrase: Optional seed phrase to use for blockchain interactions (uses config if None)
 
         Returns:
             Dict containing the result of the operation
@@ -1678,7 +1717,9 @@ class IPFSClient:
         if cancel_from_blockchain:
             try:
                 substrate_client = SubstrateClient()
-                await substrate_client.cancel_storage_request(
+                await substrate_client.cancel_storage_request(
+                    cid, seed_phrase=seed_phrase
+                )
                 print("Successfully cancelled storage from blockchain")
                 result["blockchain_result"] = {"success": True}
             except Exception as e:
```
```diff
@@ -1709,6 +1750,7 @@ class IPFSClient:
         metadata_cid: str,
         cancel_from_blockchain: bool = True,
         parallel_limit: int = 20,
+        seed_phrase: Optional[str] = None,
     ) -> bool:
         """
         Delete an erasure-coded file, including all its chunks in parallel.
@@ -1717,6 +1759,7 @@ class IPFSClient:
             metadata_cid: CID of the metadata file for the erasure-coded file
             cancel_from_blockchain: Whether to cancel storage from blockchain
             parallel_limit: Maximum number of concurrent deletion operations
+            seed_phrase: Optional seed phrase to use for blockchain interactions (uses config if None)
 
         Returns:
             bool: True if the deletion was successful, False otherwise
@@ -1793,7 +1836,9 @@ class IPFSClient:
                 # - HippiusAlreadyDeletedError if already deleted
                 # - HippiusFailedSubstrateDelete if transaction fails
                 # - Other exceptions for other failures
-                await substrate_client.cancel_storage_request(
+                await substrate_client.cancel_storage_request(
+                    metadata_cid, seed_phrase=seed_phrase
+                )
 
                 # If we get here, either:
                 # 1. Blockchain cancellation succeeded (if requested)
```
hippius_sdk/ipfs_core.py
CHANGED
```diff
@@ -36,7 +36,7 @@ class AsyncIPFSClient:
             api_url = "http://localhost:5001"
         self.api_url = api_url
         self.gateway = gateway
-        self.client = httpx.AsyncClient(timeout=
+        self.client = httpx.AsyncClient(timeout=300, follow_redirects=True)
 
     async def close(self):
         """Close the httpx client."""
```