hippius 0.2.10__py3-none-any.whl → 0.2.12__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
hippius_sdk/ipfs.py CHANGED
@@ -13,16 +13,8 @@ import uuid
 from typing import Any, Callable, Dict, List, Optional
 
 import httpx
-import requests
 
 from hippius_sdk.config import get_config_value, get_encryption_key
-from hippius_sdk.errors import (
-    HippiusAlreadyDeletedError,
-    HippiusFailedIPFSUnpin,
-    HippiusFailedSubstrateDelete,
-    HippiusIPFSConnectionError,
-    HippiusMetadataError,
-)
 from hippius_sdk.ipfs_core import AsyncIPFSClient
 from hippius_sdk.substrate import FileInput, SubstrateClient
 from hippius_sdk.utils import format_cid, format_size
@@ -203,6 +195,7 @@ class IPFSClient:
         include_formatted_size: bool = True,
         encrypt: Optional[bool] = None,
         max_retries: int = 3,
+        seed_phrase: Optional[str] = None,
     ) -> Dict[str, Any]:
         """
         Upload a file to IPFS with optional encryption.
@@ -212,6 +205,7 @@ class IPFSClient:
             include_formatted_size: Whether to include formatted size in the result (default: True)
             encrypt: Whether to encrypt the file (overrides default)
             max_retries: Maximum number of retry attempts (default: 3)
+            seed_phrase: Optional seed phrase to use for blockchain interactions (uses config if None)
 
         Returns:
             Dict[str, Any]: Dictionary containing:
@@ -291,6 +285,7 @@ class IPFSClient:
         dir_path: str,
         include_formatted_size: bool = True,
         encrypt: Optional[bool] = None,
+        seed_phrase: Optional[str] = None,
     ) -> Dict[str, Any]:
         """
         Upload a directory to IPFS with optional encryption of files.
@@ -299,6 +294,7 @@ class IPFSClient:
             dir_path: Path to the directory to upload
             include_formatted_size: Whether to include formatted size in the result (default: True)
             encrypt: Whether to encrypt files (overrides default)
+            seed_phrase: Optional seed phrase to use for blockchain interactions (uses config if None)
 
         Returns:
             Dict[str, Any]: Dictionary containing:
@@ -443,6 +439,8 @@ class IPFSClient:
         output_path: str,
         _: Optional[bool] = None,
         max_retries: int = 3,
+        seed_phrase: Optional[str] = None,
+        skip_directory_check: bool = False,
     ) -> Dict[str, Any]:
         """
         Download a file from IPFS with optional decryption.
@@ -452,6 +450,8 @@ class IPFSClient:
             output_path: Path where the downloaded file will be saved
             _: Whether to decrypt the file (overrides default)
             max_retries: Maximum number of retry attempts (default: 3)
+            seed_phrase: Optional seed phrase to use for blockchain interactions (uses config if None)
+            skip_directory_check: Whether to skip checking if the CID is a directory (default: False)
 
         Returns:
             Dict[str, Any]: Dictionary containing download results:
@@ -467,29 +467,76 @@ class IPFSClient:
             ValueError: If decryption is requested but fails
         """
         start_time = time.time()
+        is_directory = False
+
+        # Check if this is a directory (unless skip_directory_check is True)
+        if not skip_directory_check:
+            try:
+                ls_result = await self.client.ls(cid)
+                if isinstance(ls_result, dict) and ls_result.get("Objects", []):
+                    # Check if we have Links in the object, which means it's a directory
+                    for obj in ls_result["Objects"]:
+                        if obj.get("Links", []):
+                            is_directory = True
+                            break
+            except Exception:
+                # If ls check fails, continue treating as a regular file
+                pass
 
-        retries = 0
-        while retries < max_retries:
+        # Handle based on whether it's a directory or file
+        if is_directory:
             try:
-                url = f"{self.gateway}/ipfs/{cid}"
-                async with self.client.client.stream(url=url, method="GET") as response:
-                    response.raise_for_status()
+                # Use the AsyncIPFSClient's directory handling method
+                os.makedirs(
+                    os.path.dirname(os.path.abspath(output_path)), exist_ok=True
+                )
+                output_path = await self.client.download_directory(cid, output_path)
+                downloaded_size = 0
 
-                with open(output_path, "wb") as f:
-                    async for chunk in response.aiter_bytes(chunk_size=8192):
-                        f.write(chunk)
+                # Walk through the downloaded directory to calculate total size
+                for root, _, files in os.walk(output_path):
+                    for file in files:
+                        file_path = os.path.join(root, file)
+                        downloaded_size += os.path.getsize(file_path)
+
+                # Return success
+                return {
+                    "success": True,
+                    "output_path": output_path,
+                    "size_bytes": downloaded_size,
+                    "size_formatted": self.format_size(downloaded_size),
+                    "elapsed_seconds": time.time() - start_time,
+                    "decrypted": False,
+                    "is_directory": True,
+                }
+            except Exception as e:
+                raise RuntimeError(f"Failed to download directory: {str(e)}")
+        else:
+            # Regular file download
+            retries = 0
+            while retries < max_retries:
+                try:
+                    url = f"{self.gateway}/ipfs/{cid}"
+                    async with self.client.client.stream(
+                        url=url, method="GET"
+                    ) as response:
+                        response.raise_for_status()
+
+                        with open(output_path, "wb") as f:
+                            async for chunk in response.aiter_bytes(chunk_size=8192):
+                                f.write(chunk)
                     break
 
-            except (httpx.HTTPError, IOError) as e:
-                retries += 1
+                except (httpx.HTTPError, IOError) as e:
+                    retries += 1
 
-                if retries < max_retries:
-                    wait_time = 2**retries
-                    print(f"Download attempt {retries} failed: {str(e)}")
-                    print(f"Retrying in {wait_time} seconds...")
-                    time.sleep(wait_time)
-                else:
-                    raise
+                    if retries < max_retries:
+                        wait_time = 2**retries
+                        print(f"Download attempt {retries} failed: {str(e)}")
+                        print(f"Retrying in {wait_time} seconds...")
+                        time.sleep(wait_time)
+                    else:
+                        raise
 
         file_size_bytes = os.path.getsize(output_path)
         elapsed_time = time.time() - start_time
@@ -509,6 +556,7 @@ class IPFSClient:
         max_display_bytes: int = 1024,
         format_output: bool = True,
         decrypt: Optional[bool] = None,
+        seed_phrase: Optional[str] = None,
     ) -> Dict[str, Any]:
         """
         Get the content of a file from IPFS with optional decryption.
@@ -518,6 +566,7 @@ class IPFSClient:
             max_display_bytes: Maximum number of bytes to include in the preview (default: 1024)
             format_output: Whether to attempt to decode the content as text (default: True)
             decrypt: Whether to decrypt the file (overrides default)
+            seed_phrase: Optional seed phrase to use for blockchain interactions (uses config if None)
 
         Returns:
             Dict[str, Any]: Dictionary containing:
@@ -578,12 +627,15 @@ class IPFSClient:
 
         return result
 
-    async def exists(self, cid: str) -> Dict[str, Any]:
+    async def exists(
+        self, cid: str, seed_phrase: Optional[str] = None
+    ) -> Dict[str, Any]:
         """
         Check if a CID exists on IPFS.
 
         Args:
             cid: Content Identifier (CID) to check
+            seed_phrase: Optional seed phrase to use for blockchain interactions (uses config if None)
 
         Returns:
             Dict[str, Any]: Dictionary containing:
@@ -603,7 +655,9 @@ class IPFSClient:
             "gateway_url": gateway_url if exists else None,
         }
 
-    async def publish_global(self, cid: str) -> Dict[str, Any]:
+    async def publish_global(
+        self, cid: str, seed_phrase: Optional[str] = None
+    ) -> Dict[str, Any]:
         """
         Publish a CID to the global IPFS network, ensuring it's widely available.
 
@@ -612,6 +666,7 @@ class IPFSClient:
 
         Args:
             cid: Content Identifier (CID) to publish globally
+            seed_phrase: Optional seed phrase to use for blockchain interactions (uses config if None)
 
         Returns:
             Dict[str, Any]: Dictionary containing:
@@ -621,7 +676,7 @@ class IPFSClient:
                 - message: Status message
         """
         # First ensure it's pinned locally
-        pin_result = await self.pin(cid)
+        pin_result = await self.pin(cid, seed_phrase=seed_phrase)
 
         if not pin_result.get("success", False):
             return {
@@ -641,12 +696,13 @@ class IPFSClient:
             "message": "Content published to global IPFS network",
         }
 
-    async def pin(self, cid: str) -> Dict[str, Any]:
+    async def pin(self, cid: str, seed_phrase: Optional[str] = None) -> Dict[str, Any]:
         """
         Pin a CID to IPFS to keep it available.
 
         Args:
             cid: Content Identifier (CID) to pin
+            seed_phrase: Optional seed phrase to use for blockchain interactions (uses config if None)
 
         Returns:
             Dict[str, Any]: Dictionary containing:
@@ -689,6 +745,7 @@ class IPFSClient:
         max_retries: int = 3,
         verbose: bool = True,
         progress_callback: Optional[Callable[[str, int, int], None]] = None,
+        seed_phrase: Optional[str] = None,
     ) -> Dict[str, Any]:
         """
         Split a file using erasure coding, then upload the chunks to IPFS.
@@ -708,6 +765,7 @@ class IPFSClient:
             verbose: Whether to print progress information
             progress_callback: Optional callback function for progress updates
                 Function receives (stage_name, current, total)
+            seed_phrase: Optional seed phrase to use for blockchain interactions (uses config if None)
 
         Returns:
             dict: Metadata including the original file info and chunk information
@@ -1019,6 +1077,7 @@ class IPFSClient:
         temp_dir: str = None,
         max_retries: int = 3,
         verbose: bool = True,
+        seed_phrase: Optional[str] = None,
     ) -> Dict:
         """
         Reconstruct a file from erasure-coded chunks using its metadata.
@@ -1029,6 +1088,7 @@ class IPFSClient:
             temp_dir: Directory to use for temporary files (default: system temp)
             max_retries: Maximum number of retry attempts for IPFS downloads
             verbose: Whether to print progress information
+            seed_phrase: Optional seed phrase to use for blockchain interactions (uses config if None)
 
         Returns:
             Dict: containing file reconstruction info.
@@ -1059,7 +1119,10 @@ class IPFSClient:
 
         metadata_path = os.path.join(temp_dir, "metadata.json")
         await self.download_file(
-            metadata_cid, metadata_path, max_retries=max_retries
+            metadata_cid,
+            metadata_path,
+            max_retries=max_retries,
+            seed_phrase=seed_phrase,
         )
 
         if verbose:
@@ -1157,6 +1220,7 @@ class IPFSClient:
                     path,
                     max_retries=max_retries,
                     skip_directory_check=True,
+                    seed_phrase=seed_phrase,
                 )
 
                 # Read chunk data
@@ -1251,20 +1315,48 @@ class IPFSClient:
             )
 
         # Wait for all chunks to be reconstructed
+        if verbose:
+            print(f"Waiting for {len(chunk_tasks)} chunk tasks to complete...")
+
+        # Track progress
+        start_chunks_time = time.time()
+
+        # Wait for all chunks to complete (preserves ordering)
         reconstructed_chunks = await asyncio.gather(*chunk_tasks)
 
+        if verbose:
+            print(
+                f"All chunks downloaded and decoded successfully in {time.time() - start_chunks_time:.2f} seconds"
+            )
+
         if verbose:
             download_time = time.time() - start_time
             print(f"Chunk reconstruction completed in {download_time:.2f} seconds")
+            print(
+                f"Received {len(reconstructed_chunks)} of {len(chunk_tasks)} expected chunks"
+            )
 
         # Step 5: Combine the reconstructed chunks into a file
         print("Combining reconstructed chunks...")
 
+        if verbose:
+            print(f"Processing {len(reconstructed_chunks)} reconstructed chunks...")
+
         # Process chunks to remove padding correctly
         processed_chunks = []
         size_processed = 0
 
+        # Guard against empty chunks
+        if not reconstructed_chunks:
+            raise ValueError("No chunks were successfully reconstructed")
+
+        # Track progress for large files
+        chunk_process_start = time.time()
+
         for i, chunk in enumerate(reconstructed_chunks):
+            if verbose and i % 10 == 0:
+                print(f"Processing chunk {i+1}/{len(reconstructed_chunks)}...")
+
             # For all chunks except the last one, use full chunk size
             if i < len(reconstructed_chunks) - 1:
                 # Calculate how much of this chunk should be used (handle full chunks)
@@ -1279,8 +1371,19 @@ class IPFSClient:
                 processed_chunks.append(chunk[:remaining_bytes])
                 size_processed += remaining_bytes
 
+        if verbose:
+            print(
+                f"Chunk processing completed in {time.time() - chunk_process_start:.2f} seconds"
+            )
+            print(f"Concatenating {len(processed_chunks)} processed chunks...")
+
         # Concatenate all processed chunks
+        concat_start = time.time()
         file_data = b"".join(processed_chunks)
+        if verbose:
+            print(
+                f"Concatenation completed in {time.time() - concat_start:.2f} seconds"
+            )
 
         # Double-check the final size matches the original
         if len(file_data) != original_file["size"]:
@@ -1311,11 +1414,19 @@ class IPFSClient:
             file_data = self.decrypt_data(file_data)
 
         # Step 7: Write to the output file
+        print(f"Writing {len(file_data)} bytes to {output_file}...")
+        write_start = time.time()
         with open(output_file, "wb") as f:
             f.write(file_data)
+        if verbose:
+            print(
+                f"File writing completed in {time.time() - write_start:.2f} seconds"
+            )
 
         # Step 8: Verify hash if available
         if "hash" in original_file:
+            print("Verifying file hash...")
+            hash_start = time.time()
             actual_hash = hashlib.sha256(file_data).hexdigest()
             expected_hash = original_file["hash"]
 
@@ -1324,7 +1435,9 @@ class IPFSClient:
                 print(f" Expected: {expected_hash}")
                 print(f" Actual: {actual_hash}")
             else:
-                print("Hash verification successful!")
+                print(
+                    f"Hash verification successful in {time.time() - hash_start:.2f} seconds!"
+                )
 
         total_time = time.time() - start_time
         if verbose:
@@ -1354,6 +1467,7 @@ class IPFSClient:
         verbose: bool = True,
         progress_callback: Optional[Callable[[str, int, int], None]] = None,
         publish: bool = True,
+        seed_phrase: Optional[str] = None,
     ) -> Dict[str, Any]:
         """
         Erasure code a file, upload the chunks to IPFS, and store in the Hippius marketplace.
@@ -1375,6 +1489,7 @@ class IPFSClient:
             publish: Whether to publish to the blockchain (True) or just perform local
                 erasure coding without publishing (False). When False, no password
                 is needed for seed phrase access.
+            seed_phrase: Optional seed phrase to use for blockchain interactions (uses config if None)
 
         Returns:
             dict: Result including metadata CID and transaction hash (if published)
@@ -1385,7 +1500,7 @@ class IPFSClient:
         """
         # Step 1: Create substrate client if we need it and are publishing
         if substrate_client is None and publish:
-            substrate_client = SubstrateClient()
+            substrate_client = SubstrateClient(password=None, account_name=None)
         # Step 2: Erasure code the file and upload chunks
         metadata = await self.erasure_code_file(
             file_path=file_path,
@@ -1396,6 +1511,7 @@ class IPFSClient:
             max_retries=max_retries,
             verbose=verbose,
             progress_callback=progress_callback,
+            seed_phrase=seed_phrase,
         )
 
         original_file = metadata["original_file"]
@@ -1450,7 +1566,7 @@ class IPFSClient:
                 )
 
             tx_hash = await substrate_client.storage_request(
-                files=all_file_inputs, miner_ids=miner_ids
+                files=all_file_inputs, miner_ids=miner_ids, seed_phrase=seed_phrase
             )
             if verbose:
                 print("Successfully stored all files in marketplace!")
@@ -1483,7 +1599,10 @@ class IPFSClient:
         return result
 
     async def delete_file(
-        self, cid: str, cancel_from_blockchain: bool = True
+        self,
+        cid: str,
+        cancel_from_blockchain: bool = True,
+        seed_phrase: Optional[str] = None,
     ) -> Dict[str, Any]:
         """
         Delete a file or directory from IPFS and optionally cancel its storage on the blockchain.
@@ -1492,6 +1611,7 @@ class IPFSClient:
         Args:
             cid: Content Identifier (CID) of the file/directory to delete
             cancel_from_blockchain: Whether to also cancel the storage request from the blockchain
+            seed_phrase: Optional seed phrase to use for blockchain interactions (uses config if None)
 
         Returns:
             Dict containing the result of the operation
@@ -1597,7 +1717,9 @@ class IPFSClient:
         if cancel_from_blockchain:
             try:
                 substrate_client = SubstrateClient()
-                await substrate_client.cancel_storage_request(cid)
+                await substrate_client.cancel_storage_request(
+                    cid, seed_phrase=seed_phrase
+                )
                 print("Successfully cancelled storage from blockchain")
                 result["blockchain_result"] = {"success": True}
             except Exception as e:
@@ -1628,6 +1750,7 @@ class IPFSClient:
         metadata_cid: str,
         cancel_from_blockchain: bool = True,
         parallel_limit: int = 20,
+        seed_phrase: Optional[str] = None,
     ) -> bool:
         """
         Delete an erasure-coded file, including all its chunks in parallel.
@@ -1636,6 +1759,7 @@ class IPFSClient:
             metadata_cid: CID of the metadata file for the erasure-coded file
             cancel_from_blockchain: Whether to cancel storage from blockchain
             parallel_limit: Maximum number of concurrent deletion operations
+            seed_phrase: Optional seed phrase to use for blockchain interactions (uses config if None)
 
         Returns:
             bool: True if the deletion was successful, False otherwise
@@ -1712,7 +1836,9 @@ class IPFSClient:
                 # - HippiusAlreadyDeletedError if already deleted
                 # - HippiusFailedSubstrateDelete if transaction fails
                 # - Other exceptions for other failures
-                await substrate_client.cancel_storage_request(metadata_cid)
+                await substrate_client.cancel_storage_request(
+                    metadata_cid, seed_phrase=seed_phrase
+                )
 
                 # If we get here, either:
                 # 1. Blockchain cancellation succeeded (if requested)
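
Taken together, the changes above thread an optional seed_phrase keyword argument through most IPFSClient methods (upload, download, pin, publish, erasure coding, delete) and teach download_file to detect and fetch directory CIDs unless skip_directory_check=True is passed. The sketch below is an illustrative usage example, not part of the diff: the default IPFSClient() construction, the placeholder CID, and the placeholder seed phrase are assumptions; only the method names and keyword arguments shown come from the diff.

# Hypothetical usage sketch (assumes a default IPFSClient() works in your
# environment; CID and seed phrase below are placeholders, not real values).
import asyncio

from hippius_sdk.ipfs import IPFSClient


async def main() -> None:
    client = IPFSClient()

    # The optional seed_phrase overrides the configured account for any
    # blockchain interaction made by the call; omit it to use the config value.
    upload_result = await client.upload_file(
        "example.txt",
        seed_phrase="<your twelve word seed phrase>",  # placeholder
    )
    print(upload_result)

    # download_file now detects directory CIDs automatically; pass
    # skip_directory_check=True to force the plain-file code path.
    download_result = await client.download_file(
        "<cid>",  # placeholder CID
        "downloaded.txt",
        seed_phrase="<your twelve word seed phrase>",  # placeholder
    )
    print(download_result)


if __name__ == "__main__":
    asyncio.run(main())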