hippius 0.2.4__py3-none-any.whl → 0.2.6__py3-none-any.whl
This diff compares the contents of two publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
- {hippius-0.2.4.dist-info → hippius-0.2.6.dist-info}/METADATA +1 -1
- hippius-0.2.6.dist-info/RECORD +17 -0
- hippius_sdk/__init__.py +21 -10
- hippius_sdk/cli.py +12 -0
- hippius_sdk/cli_handlers.py +413 -57
- hippius_sdk/cli_parser.py +20 -0
- hippius_sdk/cli_rich.py +8 -2
- hippius_sdk/client.py +5 -3
- hippius_sdk/errors.py +77 -0
- hippius_sdk/ipfs.py +249 -298
- hippius_sdk/ipfs_core.py +216 -10
- hippius_sdk/substrate.py +101 -14
- hippius-0.2.4.dist-info/RECORD +0 -16
- {hippius-0.2.4.dist-info → hippius-0.2.6.dist-info}/WHEEL +0 -0
- {hippius-0.2.4.dist-info → hippius-0.2.6.dist-info}/entry_points.txt +0 -0
hippius_sdk/cli_handlers.py
CHANGED
@@ -9,6 +9,7 @@ import asyncio
 import base64
 import getpass
 import json
+import math
 import os
 import tempfile
 import time
@@ -44,6 +45,13 @@ from hippius_sdk.cli_rich import (
     success,
     warning,
 )
+from hippius_sdk.errors import (
+    HippiusAlreadyDeletedError,
+    HippiusFailedIPFSUnpin,
+    HippiusFailedSubstrateDelete,
+    HippiusMetadataError,
+)
+from hippius_sdk.substrate import FileInput

 try:
     import nacl.secret
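Note: the four exception types imported above come from the new hippius_sdk/errors.py module (+77 lines in 0.2.6), whose diff is not shown on this page. As a rough, hypothetical sketch of what the handlers below rely on, they behave like ordinary exception subclasses:

# Hypothetical sketch only - the real hippius_sdk/errors.py is not shown in
# this diff and may define a different hierarchy or carry extra context.
class HippiusError(Exception):
    """Assumed base class for SDK errors."""

class HippiusAlreadyDeletedError(HippiusError):
    """The CID was already removed from blockchain storage."""

class HippiusFailedSubstrateDelete(HippiusError):
    """Cancelling the storage request on the blockchain failed."""

class HippiusFailedIPFSUnpin(HippiusError):
    """Unpinning content from IPFS failed (the blockchain part may have succeeded)."""

class HippiusMetadataError(HippiusError):
    """The erasure-coding metadata file could not be fetched or parsed."""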
@@ -147,6 +155,12 @@ async def handle_download(
         f"Size: [bold cyan]{result['size_bytes']:,}[/bold cyan] bytes ([bold cyan]{result['size_formatted']}[/bold cyan])",
     ]

+    # Add details about content type
+    if result.get("is_directory", False):
+        details.append("[bold green]Content type: Directory[/bold green]")
+    else:
+        details.append("[bold blue]Content type: File[/bold blue]")
+
     if result.get("decrypted"):
         details.append("[bold yellow]File was decrypted during download[/bold yellow]")

@@ -233,6 +247,7 @@ async def handle_store(
     file_path: str,
     miner_ids: Optional[List[str]] = None,
     encrypt: Optional[bool] = None,
+    publish: bool = True,
 ) -> int:
     """Handle the store command (upload file to IPFS and store on Substrate)"""
     if not os.path.exists(file_path):
@@ -243,6 +258,16 @@ async def handle_store(
         error(f"[bold]{file_path}[/bold] is not a file")
         return 1

+    # If publishing is enabled, ensure we have a valid substrate client by accessing it
+    # This will trigger password prompts if needed right at the beginning
+    if publish and hasattr(client, "substrate_client") and client.substrate_client:
+        try:
+            # Force keypair initialization - this will prompt for password if needed
+            _ = client.substrate_client._ensure_keypair()
+        except Exception as e:
+            warning(f"Failed to initialize blockchain client: {str(e)}")
+            warning("Will continue with upload but blockchain publishing may fail")
+
     # Get file size for display
     file_size = os.path.getsize(file_path)
     file_name = os.path.basename(file_path)
@@ -269,6 +294,19 @@ async def handle_store(
             "[bold yellow]Encryption: Using default setting[/bold yellow]"
         )

+    # Add publishing status
+    if not publish:
+        upload_info.append(
+            "[bold yellow]Publishing: Disabled (local upload only)[/bold yellow]"
+        )
+        log(
+            "\nUpload will be local only - not publishing to blockchain or pinning to IPFS"
+        )
+    else:
+        upload_info.append(
+            "[bold green]Publishing: Enabled (publishing to blockchain)[/bold green]"
+        )
+
     # Parse miner IDs if provided
     miner_id_list = None
     if miner_ids:
@@ -305,13 +343,51 @@ async def handle_store(
         updater = asyncio.create_task(update_progress())

         try:
-            # Use the
+            # Use the upload_file method to get the CID
             result = await client.upload_file(
                 file_path=file_path,
                 encrypt=encrypt,
-                # miner_ids=miner_id_list
             )

+            # If publishing is enabled, store on blockchain
+            if publish and result.get("cid"):
+                try:
+                    # Pin and publish the file globally
+                    # First pin in IPFS (essential step for publishing)
+                    await client.ipfs_client.pin(result["cid"])
+
+                    # Then publish globally to make available across network
+                    publish_result = await client.ipfs_client.publish_global(
+                        result["cid"]
+                    )
+
+                    log(
+                        "\n[green]File has been pinned to IPFS and published to the network[/green]"
+                    )
+
+                    # Add gateway URL to the result for use in output
+                    if "cid" in result:
+                        result[
+                            "gateway_url"
+                        ] = f"{client.ipfs_client.gateway}/ipfs/{result['cid']}"
+
+                    # Store on blockchain if miners are provided
+                    if miner_ids:
+                        # Create a file input for blockchain storage
+                        file_input = FileInput(
+                            file_hash=result["cid"], file_name=file_name
+                        )
+
+                        # Submit storage request
+                        tx_hash = await client.substrate_client.storage_request(
+                            files=[file_input], miner_ids=miner_id_list
+                        )
+
+                        # Add transaction hash to result
+                        result["transaction_hash"] = tx_hash
+                except Exception as e:
+                    warning(f"Failed to publish file globally: {str(e)}")
+
             progress.update(task, completed=100)
             updater.cancel()

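For SDK users who want the same behaviour as the store command without going through the CLI, the publish path above reduces to three calls: pin the CID, publish it globally, then optionally submit a storage request. A minimal sketch using only methods that appear in this diff; the HippiusClient import from the package root and its default construction are assumptions:

import asyncio
import os

from hippius_sdk import HippiusClient  # assumed export from the package root
from hippius_sdk.substrate import FileInput


async def store_and_publish(path: str, miner_ids=None):
    client = HippiusClient()  # assumed to pick up IPFS/substrate settings from config
    result = await client.upload_file(file_path=path, encrypt=None)
    cid = result["cid"]

    # Same sequence as handle_store: pin first, then announce globally.
    await client.ipfs_client.pin(cid)
    await client.ipfs_client.publish_global(cid)

    # Optional blockchain storage request, mirroring the CLI behaviour.
    tx_hash = None
    if miner_ids:
        file_input = FileInput(file_hash=cid, file_name=os.path.basename(path))
        tx_hash = await client.substrate_client.storage_request(
            files=[file_input], miner_ids=miner_ids
        )
    return cid, tx_hash


# asyncio.run(store_and_publish("example.txt"))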
@@ -323,16 +399,24 @@ async def handle_store(
                 f"IPFS CID: [bold cyan]{result['cid']}[/bold cyan]",
             ]

-
-
-
-
+            # Always add the gateway URL
+            gateway_url = result.get("gateway_url")
+            if not gateway_url and "cid" in result:
+                gateway_url = f"{client.ipfs_client.gateway}/ipfs/{result['cid']}"
+
+            if gateway_url:
+                success_info.append(f"Gateway URL: [link]{gateway_url}[/link]")

             if result.get("encrypted"):
                 success_info.append(
                     "[bold yellow]File was encrypted during upload[/bold yellow]"
                 )

+            if not publish:
+                success_info.append(
+                    "[bold yellow]File was uploaded locally only (not published to blockchain)[/bold yellow]"
+                )
+
             print_panel("\n".join(success_info), title="Upload Successful")

             # If we stored in the marketplace
@@ -358,6 +442,7 @@ async def handle_store_dir(
     dir_path: str,
     miner_ids: Optional[List[str]] = None,
     encrypt: Optional[bool] = None,
+    publish: bool = True,
 ) -> int:
     """Handle the store directory command"""
     if not os.path.exists(dir_path):
@@ -368,6 +453,16 @@ async def handle_store_dir(
         error(f"[bold]{dir_path}[/bold] is not a directory")
         return 1

+    # If publishing is enabled, ensure we have a valid substrate client by accessing it
+    # This will trigger password prompts if needed right at the beginning
+    if publish and hasattr(client, "substrate_client") and client.substrate_client:
+        try:
+            # Force keypair initialization - this will prompt for password if needed
+            _ = client.substrate_client._ensure_keypair()
+        except Exception as e:
+            warning(f"Failed to initialize blockchain client: {str(e)}")
+            warning("Will continue with upload but blockchain publishing may fail")
+
     # Upload information panel
     upload_info = [f"Directory: [bold]{dir_path}[/bold]"]

@@ -417,12 +512,92 @@ async def handle_store_dir(
         updater = asyncio.create_task(update_progress())

         try:
+            # Upload info message based on publish flag
+            if not publish:
+                upload_info.append(
+                    "[bold yellow]Publishing: Disabled (local upload only)[/bold yellow]"
+                )
+                log(
+                    "\nUpload will be local only - not publishing to blockchain or pinning to IPFS"
+                )
+            else:
+                upload_info.append(
+                    "[bold green]Publishing: Enabled (publishing to blockchain)[/bold green]"
+                )
+
+            # Display updated upload information panel
+            print_panel("\n".join(upload_info), title="Directory Upload Operation")
+
             # Use the store_directory method
             result = await client.ipfs_client.upload_directory(
                 dir_path=dir_path,
                 encrypt=encrypt,
             )

+            # Skip publishing to blockchain if publish is False
+            if not publish:
+                # Remove any blockchain-related data from result to ensure we don't try to use it
+                if "transaction_hash" in result:
+                    del result["transaction_hash"]
+            else:
+                # If we want to publish, make sure files are pinned globally
+                try:
+                    # Add gateway URL to the result for use in output
+                    if "cid" in result:
+                        result[
+                            "gateway_url"
+                        ] = f"{client.ipfs_client.gateway}/ipfs/{result['cid']}"
+
+                    # Pin and publish the directory root CID globally
+                    # First pin in IPFS (essential step for publishing)
+                    await client.ipfs_client.pin(result["cid"])
+
+                    # Then publish globally to make available across network
+                    await client.ipfs_client.publish_global(result["cid"])
+
+                    log(
+                        "\n[green]Directory has been pinned to IPFS and published to the network[/green]"
+                    )
+
+                    # Also pin and publish individual files if available
+                    for file_info in result.get("files", []):
+                        if "cid" in file_info:
+                            try:
+                                # Pin each file to ensure availability
+                                await client.ipfs_client.pin(file_info["cid"])
+
+                                # Then publish globally
+                                await client.ipfs_client.publish_global(
+                                    file_info["cid"]
+                                )
+                            except Exception as e:
+                                warning(
+                                    f"Failed to publish file {file_info['name']} globally: {str(e)}"
+                                )
+
+                    # Store on blockchain if miners are provided - this is what requires a password
+                    if (
+                        miner_ids
+                        and hasattr(client, "substrate_client")
+                        and client.substrate_client
+                    ):
+                        # Create a file input for blockchain storage
+                        file_input = FileInput(
+                            file_hash=result["cid"],
+                            file_name=os.path.basename(dir_path),
+                        )
+
+                        # This will prompt for a password if needed
+                        tx_hash = await client.substrate_client.storage_request(
+                            files=[file_input], miner_ids=miner_id_list
+                        )
+
+                        # Add transaction hash to result
+                        result["transaction_hash"] = tx_hash
+
+                except Exception as e:
+                    warning(f"Failed to publish directory globally: {str(e)}")
+
             # Complete the progress
             progress.update(task, completed=100)
             # Cancel the updater task
@@ -436,9 +611,23 @@ async def handle_store_dir(
                 f"Directory CID: [bold cyan]{result['cid']}[/bold cyan]",
             ]

-
+            # Always add the gateway URL
+            gateway_url = result.get("gateway_url")
+            if not gateway_url and "cid" in result:
+                gateway_url = f"{client.ipfs_client.gateway}/ipfs/{result['cid']}"
+
+            if gateway_url:
+                success_info.append(f"Gateway URL: [link]{gateway_url}[/link]")
+
+            # Add encryption and publish status to success info
+            if result.get("encrypted"):
                 success_info.append(
-
+                    "[bold yellow]Directory was encrypted during upload[/bold yellow]"
+                )
+
+            if not publish:
+                success_info.append(
+                    "[bold yellow]Directory was uploaded locally only (not published to blockchain)[/bold yellow]"
                 )

             print_panel("\n".join(success_info), title="Directory Upload Successful")
@@ -461,10 +650,19 @@ async def handle_store_dir(
                 ["Index", "Filename", "CID"],
             )

-            # If we stored in the marketplace
-            if
+            # If publishing is enabled and we stored in the marketplace
+            if publish:
+                # We only include transaction hash stuff if we actually created a blockchain transaction
+                if "transaction_hash" in result:
+                    log(
+                        f"\nStored in marketplace. Transaction hash: [bold]{result['transaction_hash']}[/bold]"
+                    )
+                else:
+                    # If publish is true but no transaction hash, just indicate files were published to IPFS
+                    log("\n[green]Directory was published to IPFS network.[/green]")
+            elif not publish:
                 log(
-
+                    "\n[yellow]Files were uploaded locally only. No blockchain publication or IPFS pinning.[/yellow]"
                 )

             return 0
@@ -991,6 +1189,36 @@ async def handle_erasure_code(
         )
         return 1

+    # Request password early if we're going to publish to the blockchain
+    if publish and client.substrate_client._seed_phrase is None:
+        # First check if we have an encrypted seed phrase that will require a password
+        config = load_config()
+        account_name = client.substrate_client._account_name or get_active_account()
+
+        if account_name and account_name in config["substrate"].get("accounts", {}):
+            account_data = config["substrate"]["accounts"][account_name]
+            is_encoded = account_data.get("seed_phrase_encoded", False)
+
+            if is_encoded:
+                warning("Wallet password will be required for publishing to blockchain")
+                password = getpass.getpass(
+                    "Enter password to decrypt seed phrase: \n\n"
+                )
+
+                # Store the password in client for later use
+                client.substrate_client._seed_phrase_password = password
+
+                # Pre-authenticate to ensure the password is correct
+                try:
+                    seed_phrase = decrypt_seed_phrase(password, account_name)
+                    if not seed_phrase:
+                        error("Failed to decrypt seed phrase. Incorrect password?")
+                        return 1
+                    client.substrate_client._seed_phrase = seed_phrase
+                except Exception as e:
+                    error(f"Error decrypting seed phrase: {e}")
+                    return 1
+
     # Get file size
     file_size = os.path.getsize(file_path)
     file_name = os.path.basename(file_path)
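The early-password branch above looks the account up in the local config; from the keys it reads (config["substrate"]["accounts"][name]["seed_phrase_encoded"]), the relevant slice of that config is presumably shaped roughly as follows. This is an illustrative sketch only, not the full schema, and "my-account" is a placeholder name:

config = {
    "substrate": {
        "accounts": {
            "my-account": {
                # True when the stored seed phrase is password-encrypted,
                # which is what triggers the early getpass prompt above.
                "seed_phrase_encoded": True,
                # ...other per-account fields are not visible in this diff...
            },
        },
    },
}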
@@ -1020,11 +1248,18 @@ async def handle_erasure_code(

         chunk_size = new_chunk_size

+    # Calculate total number of chunks that will be created
+    total_original_chunks = max(1, int(math.ceil(file_size / chunk_size)))
+    total_encoded_chunks = total_original_chunks * m
+    estimated_size_per_chunk = min(chunk_size, file_size / total_original_chunks)
+
     # Create parameter information panel
     param_info = [
         f"File: [bold]{file_name}[/bold] ([bold cyan]{file_size / 1024 / 1024:.2f} MB[/bold cyan])",
         f"Parameters: k=[bold]{k}[/bold], m=[bold]{m}[/bold] (need {k} of {m} chunks to reconstruct)",
         f"Chunk size: [bold cyan]{chunk_size / 1024 / 1024:.6f} MB[/bold cyan]",
+        f"Total chunks to be created: [bold yellow]{total_encoded_chunks}[/bold yellow] ({total_original_chunks} original chunks × {m} encoded chunks each)",
+        f"Estimated storage required: [bold magenta]{(total_encoded_chunks * estimated_size_per_chunk) / (1024 * 1024):.2f} MB[/bold magenta]",
     ]

     # Add encryption status
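To make the new estimate concrete (example numbers, not taken from the diff): a 100 MB file with a 10 MB chunk size and k=3, m=5 yields ceil(100 / 10) = 10 original chunks, 10 × 5 = 50 encoded chunks, and roughly 50 × 10 MB = 500 MB of storage. The same arithmetic as the added lines:

import math

file_size = 100 * 1024 * 1024   # 100 MB (example value)
chunk_size = 10 * 1024 * 1024   # 10 MB (example value)
k, m = 3, 5                     # need any 3 of every 5 chunks to reconstruct

total_original_chunks = max(1, int(math.ceil(file_size / chunk_size)))        # 10
total_encoded_chunks = total_original_chunks * m                              # 50
estimated_size_per_chunk = min(chunk_size, file_size / total_original_chunks)
estimated_storage_mb = (total_encoded_chunks * estimated_size_per_chunk) / (1024 * 1024)
print(total_encoded_chunks, f"{estimated_storage_mb:.2f} MB")                 # 50 500.00 MB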
@@ -1394,89 +1629,210 @@ async def handle_reconstruct(


 async def handle_delete(client: HippiusClient, cid: str, force: bool = False) -> int:
-    """Handle the delete command"""
-    info(f"Preparing to delete
+    """Handle the delete command for files or directories"""
+    info(f"Preparing to delete content with CID: [bold cyan]{cid}[/bold cyan]")
+
+    # First check if this is a directory
+    try:
+        exists_result = await client.exists(cid)
+        if not exists_result["exists"]:
+            error(f"CID [bold cyan]{cid}[/bold cyan] not found on IPFS")
+            return 1
+    except Exception as e:
+        warning(f"Error checking if CID exists: {e}")

     if not force:
-        warning("This will cancel storage and remove the
+        warning("This will cancel storage and remove the content from the marketplace.")
         confirm = input("Continue? (y/n): ").strip().lower()
         if confirm != "y":
             log("Deletion cancelled", style="yellow")
             return 0

-
-
+    # Show spinner during deletion
+    with console.status("[cyan]Deleting content...[/cyan]", spinner="dots") as status:
+        result = await client.delete_file(cid)

-
-
+    # Display results
+    is_directory = result.get("is_directory", False)
+    child_files = result.get("child_files", [])

-
-
-
-
-    )
-
-        # Create an informative panel with notes
-        notes = [
-            "1. The file is now unpinned from the marketplace",
-            "2. The CID may still resolve temporarily until garbage collection occurs",
-            "3. If the file was published to the global IPFS network, it may still be",
-            " available through other nodes that pinned it",
+    if is_directory:
+        # Directory deletion
+        details = [
+            f"Successfully deleted directory: [bold cyan]{cid}[/bold cyan]",
+            f"Child files unpinned: [bold]{len(child_files)}[/bold]",
         ]

-
-
+        # If there are child files, show them in a table
+        if child_files:
+            table_data = []
+            for i, file in enumerate(
+                child_files[:10], 1
+            ): # Limit to first 10 files if many
+                table_data.append(
+                    {
+                        "Index": str(i),
+                        "Filename": file.get("name", "unknown"),
+                        "CID": file.get("cid", "unknown"),
+                    }
+                )

-
+            if len(child_files) > 10:
+                table_data.append(
+                    {
+                        "Index": "...",
+                        "Filename": f"({len(child_files) - 10} more files)",
+                        "CID": "...",
+                    }
+                )

-
+            print_table(
+                "Unpinned Child Files", table_data, ["Index", "Filename", "CID"]
+            )
     else:
-
+        # Regular file deletion
+        details = [f"Successfully deleted file: [bold cyan]{cid}[/bold cyan]"]
+
+    if "duration_seconds" in result.get("timing", {}):
+        details.append(
+            f"Deletion completed in [bold green]{result['timing']['duration_seconds']:.2f}[/bold green] seconds"
+        )
+
+    print_panel("\n".join(details), title="Deletion Complete")
+
+    # Create an informative panel with notes
+    notes = [
+        "1. The content is now unpinned from the marketplace",
+        "2. The CID may still resolve temporarily until garbage collection occurs",
+        "3. If the content was published to the global IPFS network, it may still be",
+        " available through other nodes that pinned it",
+    ]
+
+    print_panel("\n".join(notes), title="Important Notes")
+
+    return 0


 async def handle_ec_delete(
     client: HippiusClient, metadata_cid: str, force: bool = False
 ) -> int:
-    """Handle the
-
-
-    )
+    """Handle the erasure-code delete command"""
+
+    # Create a stylish header with the CID
+    info(f"Preparing to delete erasure-coded file with metadata CID:")
+    print_panel(f"[bold cyan]{metadata_cid}[/bold cyan]", title="Metadata CID")

+    # Confirm the deletion if not forced
     if not force:
-
-
+        warning_text = [
+            "This will cancel the storage of this file on the Hippius blockchain.",
+            "The file metadata will be removed from blockchain storage tracking.",
+            "[dim]Note: Only the metadata CID will be canceled; contents may remain on IPFS.[/dim]",
+        ]
+        print_panel("\n".join(warning_text), title="Warning")
+
+        confirm = input("Continue with deletion? (y/n): ").strip().lower()
         if confirm != "y":
             log("Deletion cancelled", style="yellow")
             return 0

     try:
+        # First, pre-authenticate the client to get any password prompts out of the way
+        # This accesses the substrate client to trigger authentication
+        if not client.substrate_client._keypair:
+            client.substrate_client._ensure_keypair()
+
+        # Now we can show the spinner after any password prompts
         info("Deleting erasure-coded file from marketplace...")
-        result = await client.delete_ec_file(metadata_cid)

-
-
+        # Create a more detailed spinner with phases
+        with console.status(
+            "[cyan]Processing file metadata and chunks...[/cyan]", spinner="dots"
+        ) as status:
+            try:
+                # Use the specialized delete method that now throws specific exceptions
+                await client.delete_ec_file(metadata_cid)

-
-
-
-                details.append(f"Deleted [bold]{chunks_deleted}[/bold] chunks")
+                # If we get here, deletion was successful
+                deletion_success = True
+                already_deleted = False

-
-
-
+            except HippiusAlreadyDeletedError:
+                # Special case - already deleted
+                deletion_success = False
+                already_deleted = True
+
+            except HippiusFailedSubstrateDelete as e:
+                # Blockchain deletion failed
+                error(f"Blockchain storage cancellation failed: {e}")
+                return 1
+
+            except HippiusFailedIPFSUnpin as e:
+                # IPFS unpinning failed, but blockchain deletion succeeded
+                warning(
+                    f"Note: Some IPFS operations failed, but blockchain storage was successfully canceled"
+                )
+                # Consider this a success for the user since the more important blockchain part worked
+                deletion_success = True
+                already_deleted = False
+
+            except HippiusMetadataError as e:
+                # Metadata parsing failed, but we can still continue
+                warning(
+                    f"Note: Metadata file was corrupted, but blockchain storage was successfully canceled"
                 )
+                # Consider this a success for the user since the blockchain part worked
+                deletion_success = True
+                already_deleted = False

-
+            except Exception as e:
+                # Handle any unexpected errors
+                error(f"Unexpected error: {e}")
+                return 1

+        # Show the result
+        if deletion_success:
+            # Create a success panel
+            success_panel = [
+                "[bold green]✓[/bold green] Metadata CID canceled from blockchain storage",
+                f"[dim]This file is no longer tracked for storage payments[/dim]",
+                "",
+                "[dim]To purge file data completely:[/dim]",
+                "• Individual chunks may still exist on IPFS and nodes",
+                "• For complete deletion, all chunks should be unpinned manually",
+            ]
+            print_panel(
+                "\n".join(success_panel), title="Storage Cancellation Successful"
+            )
+            return 0
+        elif already_deleted:
+            # Create a panel for the already deleted case
+            already_panel = [
+                "[bold yellow]![/bold yellow] This file has already been deleted from storage",
+                "[dim]The CID was not found in the blockchain storage registry[/dim]",
+                "",
+                "This is expected if:",
+                "• You previously deleted this file",
+                "• The file was deleted by another process",
+                "• The file was never stored in the first place",
+            ]
+            print_panel("\n".join(already_panel), title="Already Deleted")
+            # Return 0 since this is not an error condition
             return 0
         else:
-            error
-
-
+            # Create an error panel for all other failures
+            error_panel = [
+                "[bold red]×[/bold red] File not found in blockchain storage",
+                "[dim]The metadata CID was not found in the blockchain storage registry[/dim]",
+                "",
+                "Possible reasons:",
+                "• The CID may be incorrect",
+                "• You may not be the owner of this file",
+            ]
+            print_panel("\n".join(error_panel), title="Storage Cancellation Failed")
             return 1
-
     except Exception as e:
-        error(f"
+        error(f"Error deleting erasure-coded file: {e}")
         return 1

hippius_sdk/cli_parser.py
CHANGED
@@ -197,12 +197,32 @@ def add_storage_commands(subparsers):
         "store", help="Upload a file to IPFS and store it on Substrate"
     )
     store_parser.add_argument("file_path", help="Path to file to upload")
+    store_parser.add_argument(
+        "--publish",
+        action="store_true",
+        help="Publish file to IPFS and store on the blockchain (default)",
+    )
+    store_parser.add_argument(
+        "--no-publish",
+        action="store_true",
+        help="Don't publish file to IPFS or store on the blockchain (local only)",
+    )

     # Store directory command
     store_dir_parser = subparsers.add_parser(
         "store-dir", help="Upload a directory to IPFS and store all files on Substrate"
    )
     store_dir_parser.add_argument("dir_path", help="Path to directory to upload")
+    store_dir_parser.add_argument(
+        "--publish",
+        action="store_true",
+        help="Publish all files to IPFS and store on the blockchain (default)",
+    )
+    store_dir_parser.add_argument(
+        "--no-publish",
+        action="store_true",
+        help="Don't publish files to IPFS or store on the blockchain (local only)",
+    )

     # Pinning status command
     pinning_status_parser = subparsers.add_parser(
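Since --publish and --no-publish are both independent store_true flags, something has to fold them into the single publish keyword that handle_store and handle_store_dir now accept. That wiring lives in hippius_sdk/cli.py, which this diff does not show in detail, so the following is only a plausible sketch of the idea:

# Hypothetical glue code - the real dispatch in hippius_sdk/cli.py is not part
# of this diff and may differ (for example, it might reject passing both flags).
def resolve_publish(args) -> bool:
    if getattr(args, "no_publish", False):
        return False  # --no-publish wins: local upload only
    return True       # default behaviour (and --publish) means publish


# exit_code = await handle_store(
#     client,
#     args.file_path,
#     miner_ids=...,   # elided
#     encrypt=...,     # elided
#     publish=resolve_publish(args),
# )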