hippius 0.1.14__py3-none-any.whl → 0.2.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
hippius_sdk/cli.py CHANGED
@@ -7,17 +7,15 @@ utilities for encryption key generation, file operations, and marketplace intera
  """

  import argparse
+ import asyncio
  import base64
  import concurrent.futures
  import getpass
+ import inspect
  import json
  import os
- import random
  import sys
- import threading
  import time
- import uuid
- from typing import List, Optional

  from dotenv import load_dotenv

@@ -31,7 +29,6 @@ from hippius_sdk import (
  get_active_account,
  get_all_config,
  get_config_value,
- get_encryption_key,
  get_seed_phrase,
  initialize_from_env,
  list_accounts,
@@ -40,10 +37,8 @@ from hippius_sdk import (
  save_config,
  set_active_account,
  set_config_value,
- set_encryption_key,
  set_seed_phrase,
  )
- from hippius_sdk.substrate import FileInput

  try:
  import nacl.secret
@@ -53,10 +48,7 @@ except ImportError:
  else:
  ENCRYPTION_AVAILABLE = True

- # Load environment variables
  load_dotenv()
-
- # Initialize configuration from environment variables
  initialize_from_env()


@@ -181,12 +173,12 @@ def create_client(args):
  return client


- def handle_download(client, cid, output_path, decrypt=None):
+ async def handle_download(client, cid, output_path, decrypt=None):
  """Handle the download command"""
  print(f"Downloading {cid} to {output_path}...")

  # Use the enhanced download method which returns formatted information
- result = client.download_file(cid, output_path, decrypt=decrypt)
+ result = await client.download_file(cid, output_path, decrypt=decrypt)

  print(f"Download successful in {result['elapsed_seconds']} seconds!")
  print(f"Saved to: {result['output_path']}")
@@ -198,10 +190,10 @@ def handle_download(client, cid, output_path, decrypt=None):
  return 0


- def handle_exists(client, cid):
+ async def handle_exists(client, cid):
  """Handle the exists command"""
  print(f"Checking if CID {cid} exists on IPFS...")
- result = client.exists(cid)
+ result = await client.exists(cid)

  # Use the formatted CID from the result
  formatted_cid = result["formatted_cid"]
@@ -217,12 +209,12 @@ def handle_exists(client, cid):
  return 0


- def handle_cat(client, cid, max_size, decrypt=None):
+ async def handle_cat(client, cid, max_size, decrypt=None):
  """Handle the cat command"""
  print(f"Retrieving content of CID {cid}...")
  try:
  # Use the enhanced cat method with formatting
- result = client.cat(cid, max_display_bytes=max_size, decrypt=decrypt)
+ result = await client.cat(cid, max_display_bytes=max_size, decrypt=decrypt)

  # Display file information
  print(
@@ -255,7 +247,7 @@ def handle_cat(client, cid, max_size, decrypt=None):
  return 0


- def handle_store(client, file_path, miner_ids, encrypt=None):
+ async def handle_store(client, file_path, miner_ids, encrypt=None):
  """Handle the store command"""
  if not os.path.exists(file_path):
  print(f"Error: File {file_path} not found")
@@ -265,7 +257,7 @@ def handle_store(client, file_path, miner_ids, encrypt=None):
  start_time = time.time()

  # Use the enhanced upload_file method that returns formatted information
- result = client.upload_file(file_path, encrypt=encrypt)
+ result = await client.upload_file(file_path, encrypt=encrypt)

  ipfs_elapsed_time = time.time() - start_time

@@ -282,11 +274,23 @@ def handle_store(client, file_path, miner_ids, encrypt=None):
  start_time = time.time()

  try:
+ # Check if we have credits
+ try:
+ if hasattr(client.substrate_client, "get_free_credits"):
+ credits = client.substrate_client.get_free_credits()
+ print(f"Account credits: {credits}")
+ if credits <= 0:
+ print(
+ f"Warning: Account has no free credits (current: {credits}). Transaction may fail."
+ )
+ except Exception as e:
+ print(f"Warning: Could not check free credits: {e}")
+
  # Create a file input object for the marketplace
  file_input = {"fileHash": result["cid"], "fileName": result["filename"]}

- # Store on Substrate
- client.substrate_client.storage_request([file_input], miner_ids)
+ # Store on Substrate - now it's an async call
+ tx_hash = await client.substrate_client.storage_request([file_input], miner_ids)

  substrate_elapsed_time = time.time() - start_time
  print(
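A minimal sketch of the flow the store handler above converges on in 0.2.1: upload the file to IPFS, then await the marketplace call (the optional free-credits check is omitted here). The return shapes ("cid", "filename", a transaction hash) are assumed from the handler code above rather than from any published API reference.

import asyncio
import time

async def store_one_file(client, file_path, miner_ids=None, encrypt=None):
    # Upload to IPFS; the handler above expects a dict with "cid" and "filename".
    result = await client.upload_file(file_path, encrypt=encrypt)

    # In 0.2.1 the marketplace call is awaited and yields a transaction hash.
    file_input = {"fileHash": result["cid"], "fileName": result["filename"]}
    started = time.time()
    tx_hash = await client.substrate_client.storage_request([file_input], miner_ids)
    print(f"Stored {result['filename']} in {time.time() - started:.2f}s (tx: {tx_hash})")
    return tx_hash

# Driven from synchronous code the same way the CLI does it:
# asyncio.run(store_one_file(client, "example.bin"))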
@@ -307,7 +311,7 @@ def handle_store(client, file_path, miner_ids, encrypt=None):
  return 0


- def handle_store_dir(client, dir_path, miner_ids, encrypt=None):
+ async def handle_store_dir(client, dir_path, miner_ids, encrypt=None):
  """Handle the store-dir command"""
  if not os.path.isdir(dir_path):
  print(f"Error: Directory {dir_path} not found")
@@ -331,7 +335,7 @@ def handle_store_dir(client, dir_path, miner_ids, encrypt=None):
  for file_path, rel_path in all_files:
  try:
  print(f" Uploading: {rel_path}")
- file_result = client.upload_file(file_path, encrypt=encrypt)
+ file_result = await client.upload_file(file_path, encrypt=encrypt)
  individual_cids.append(
  {
  "path": rel_path,
@@ -351,7 +355,7 @@ def handle_store_dir(client, dir_path, miner_ids, encrypt=None):
  print(f" Error uploading {rel_path}: {e}")

  # Now upload the entire directory
- result = client.upload_directory(dir_path, encrypt=encrypt)
+ result = await client.upload_directory(dir_path, encrypt=encrypt)

  ipfs_elapsed_time = time.time() - start_time

@@ -384,7 +388,7 @@ def handle_store_dir(client, dir_path, miner_ids, encrypt=None):
  file_inputs.append({"fileHash": item["cid"], "fileName": item["filename"]})

  # Store all files in a single batch request
- client.substrate_client.storage_request(file_inputs, miner_ids)
+ tx_hash = await client.substrate_client.storage_request(file_inputs, miner_ids)

  substrate_elapsed_time = time.time() - start_time
  print(
@@ -400,7 +404,7 @@ def handle_store_dir(client, dir_path, miner_ids, encrypt=None):
  return 0


- def handle_credits(client, account_address):
+ async def handle_credits(client, account_address):
  """Handle the credits command"""
  print("Checking free credits for the account...")
  try:
@@ -436,7 +440,7 @@ def handle_credits(client, account_address):

  return 1

- credits = client.substrate_client.get_free_credits(account_address)
+ credits = await client.substrate_client.get_free_credits(account_address)
  print(f"\nFree credits: {credits:.6f}")
  raw_value = int(
  credits * 1_000_000_000_000_000_000
@@ -450,7 +454,7 @@ def handle_credits(client, account_address):
  return 0


- def handle_files(client, account_address, show_all_miners=False):
+ async def handle_files(client, account_address, show_all_miners=False):
  """
  Display files stored by a user in a nice format.

@@ -490,7 +494,9 @@ def handle_files(client, account_address, show_all_miners=False):

  # Get files for the account using the new profile-based method
  print(f"Retrieving files for account: {account_address}")
- files = client.substrate_client.get_user_files_from_profile(account_address)
+ files = await client.substrate_client.get_user_files_from_profile(
+ account_address
+ )

  # Check if any files were found
  if not files:
@@ -568,7 +574,9 @@ def handle_files(client, account_address, show_all_miners=False):
  return 0


- def handle_ec_files(client, account_address, show_all_miners=False, show_chunks=False):
+ async def handle_ec_files(
+ client, account_address, show_all_miners=False, show_chunks=False
+ ):
  """Handle the ec-files command to show only erasure-coded files"""
  print("Looking for erasure-coded files...")
  try:
@@ -604,7 +612,9 @@ def handle_ec_files(client, account_address, show_all_miners=False, show_chunks=
  return 1

  # First, get all user files using the profile method
- files = client.substrate_client.get_user_files_from_profile(account_address)
+ files = await client.substrate_client.get_user_files_from_profile(
+ account_address
+ )

  # Filter for metadata files (ending with .ec_metadata)
  ec_metadata_files = []
@@ -637,7 +647,7 @@ def handle_ec_files(client, account_address, show_all_miners=False, show_chunks=
  # Fetch and parse the metadata to get original file info
  try:
  # Use the formatted CID, not the raw hex-encoded version
- metadata = client.ipfs_client.cat(formatted_cid)
+ metadata = await client.ipfs_client.cat(formatted_cid)

  # Check if we have text content
  if metadata.get("is_text", False):
@@ -772,8 +782,16 @@ def handle_ec_files(client, account_address, show_all_miners=False, show_chunks=
  return 0


- def handle_erasure_code(
- client, file_path, k, m, chunk_size, miner_ids, encrypt=None, verbose=True
+ async def handle_erasure_code(
+ client,
+ file_path,
+ k,
+ m,
+ chunk_size,
+ miner_ids,
+ encrypt=None,
+ publish=False,
+ verbose=True,
  ):
  """Handle the erasure-code command"""
  if not os.path.exists(file_path):
@@ -803,7 +821,6 @@ def handle_erasure_code(
  for root, _, files in os.walk(file_path):
  if files:
  example_file = os.path.join(root, files[0])
- rel_path = os.path.relpath(example_file, os.path.dirname(file_path))
  print(f' hippius erasure-code "{example_file}" --k {k} --m {m}')
  break
@@ -814,8 +831,16 @@ def handle_erasure_code(
  choice = input("> ").strip().lower()

  if choice in ("y", "yes"):
- return handle_erasure_code_directory(
- client, file_path, k, m, chunk_size, miner_ids, encrypt, verbose
+ return await handle_erasure_code_directory(
+ client,
+ file_path,
+ k,
+ m,
+ chunk_size,
+ miner_ids,
+ encrypt,
+ publish,
+ verbose,
  )
  else:
  print(f" No files found in directory {file_path}")
@@ -845,11 +870,6 @@ def handle_erasure_code(

  print(f"Processing {file_path} ({file_size_mb:.2f} MB) with erasure coding...")

- # Check if the file is too small for the current chunk size and k value
- original_k = k
- original_m = m
- original_chunk_size = chunk_size
-
  # Calculate how many chunks we would get with current settings
  potential_chunks = max(1, file_size // chunk_size)

@@ -858,7 +878,7 @@ def handle_erasure_code(
  # Calculate a new chunk size that would give us exactly k chunks
  new_chunk_size = max(1024, file_size // k) # Ensure at least 1KB chunks

- print(f"Warning: File is too small for the requested parameters.")
+ print("Warning: File is too small for the requested parameters.")
  print(
  f"Original parameters: k={k}, m={m}, chunk size={chunk_size/1024/1024:.2f} MB"
  )
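The parameter fallback above reduces to a small pure function: if the file cannot yield at least k chunks at the requested chunk size, the chunk size is shrunk so roughly k chunks are produced, with a 1 KB floor. A sketch with a worked example (the helper name is illustrative, not part of the SDK):

def adjust_chunk_size(file_size: int, k: int, chunk_size: int) -> int:
    # How many chunks the current settings would produce (at least 1).
    potential_chunks = max(1, file_size // chunk_size)
    if potential_chunks < k:
        # Shrink so the file splits into about k chunks, never below 1 KB.
        return max(1024, file_size // k)
    return chunk_size

# Example: a 2 MB file with k=3 and 1 MB chunks yields only 2 chunks,
# so the chunk size drops to 2 * 1024 * 1024 // 3 == 699050 bytes.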
@@ -879,7 +899,7 @@ def handle_erasure_code(

  try:
  # Use the store_erasure_coded_file method directly from HippiusClient
- result = client.store_erasure_coded_file(
+ result = await client.store_erasure_coded_file(
  file_path=file_path,
  k=k,
  m=m,
@@ -890,27 +910,73 @@ def handle_erasure_code(
  verbose=verbose,
  )

+ # Store the original result before potentially overwriting it with publish result
+ storage_result = result.copy()
+ metadata_cid = storage_result.get("metadata_cid", "unknown")
+
+ # If publish flag is set, publish to the global IPFS network
+ if publish:
+ if metadata_cid != "unknown":
+ print("\nPublishing to global IPFS network...")
+ try:
+ # Publish the metadata to the global IPFS network
+ publish_result = await client.ipfs_client.publish_global(
+ metadata_cid
+ )
+ if publish_result.get("published", False):
+ print("Successfully published to global IPFS network")
+ print(f"Access URL: https://ipfs.io/ipfs/{metadata_cid}")
+ else:
+ print(
+ f"Warning: {publish_result.get('message', 'Failed to publish to global network')}"
+ )
+ except Exception as e:
+ print(f"Warning: Failed to publish to global IPFS network: {e}")
+
  elapsed_time = time.time() - start_time

  print(f"\nErasure coding and storage completed in {elapsed_time:.2f} seconds!")

  # Display metadata
- metadata = result.get("metadata", {})
- metadata_cid = result.get("metadata_cid", "unknown")
- total_files_stored = result.get("total_files_stored", 0)
+ metadata = storage_result.get("metadata", {})
+ total_files_stored = storage_result.get("total_files_stored", 0)

  original_file = metadata.get("original_file", {})
  erasure_coding = metadata.get("erasure_coding", {})

- print("\nErasure Coding Summary:")
- print(
- f" Original file: {original_file.get('name')} ({original_file.get('size', 0)/1024/1024:.2f} MB)"
- )
- print(f" File ID: {erasure_coding.get('file_id')}")
- print(f" Parameters: k={erasure_coding.get('k')}, m={erasure_coding.get('m')}")
- print(f" Total chunks: {len(metadata.get('chunks', []))}")
- print(f" Total files stored in marketplace: {total_files_stored}")
- print(f" Metadata CID: {metadata_cid}")
+ # If metadata_cid is known but metadata is empty, try to get file info from result directly
+ if metadata_cid != "unknown" and not original_file:
+ file_name = os.path.basename(file_path)
+ file_size = os.path.getsize(file_path) if os.path.exists(file_path) else 0
+
+ # Use direct values from input parameters when metadata is not available
+ print("\nErasure Coding Summary:")
+ print(f" Original file: {file_name} ({file_size/1024/1024:.2f} MB)")
+ print(f" Parameters: k={k}, m={m}")
+ print(f" Total files stored in marketplace: {total_files_stored}")
+ print(f" Metadata CID: {metadata_cid}")
+
+ # Add publish status if applicable
+ if publish:
+ print(f" Published to global IPFS: Yes")
+ print(f" Global access URL: https://ipfs.io/ipfs/{metadata_cid}")
+ else:
+ print("\nErasure Coding Summary:")
+ print(
+ f" Original file: {original_file.get('name')} ({original_file.get('size', 0)/1024/1024:.2f} MB)"
+ )
+ print(f" File ID: {erasure_coding.get('file_id')}")
+ print(
+ f" Parameters: k={erasure_coding.get('k')}, m={erasure_coding.get('m')}"
+ )
+ print(f" Total chunks: {len(metadata.get('chunks', []))}")
+ print(f" Total files stored in marketplace: {total_files_stored}")
+ print(f" Metadata CID: {metadata_cid}")
+
+ # Add publish status if applicable
+ if publish:
+ print(f" Published to global IPFS: Yes")
+ print(f" Global access URL: https://ipfs.io/ipfs/{metadata_cid}")

  # If we stored in the marketplace
  if "transaction_hash" in result:
@@ -923,9 +989,13 @@ def handle_erasure_code(
  print(f" 1. The metadata CID: {metadata_cid}")
  print(" 2. Access to at least k chunks for each original chunk")
  print("\nReconstruction command:")
- print(
- f" hippius reconstruct {metadata_cid} reconstructed_{original_file.get('name')}"
- )
+
+ # Get file name, either from metadata or directly from file path
+ output_filename = original_file.get("name")
+ if not output_filename:
+ output_filename = os.path.basename(file_path)
+
+ print(f" hippius reconstruct {metadata_cid} reconstructed_{output_filename}")

  return 0

@@ -945,8 +1015,16 @@ def handle_erasure_code(
  return 1


- def handle_erasure_code_directory(
- client, dir_path, k, m, chunk_size, miner_ids, encrypt=None, verbose=True
+ async def handle_erasure_code_directory(
+ client,
+ dir_path,
+ k,
+ m,
+ chunk_size,
+ miner_ids,
+ encrypt=None,
+ publish=False,
+ verbose=True,
  ):
  """Apply erasure coding to each file in a directory individually"""
  if not os.path.isdir(dir_path):
@@ -1025,7 +1103,7 @@ def handle_erasure_code_directory(

  try:
  # Use the store_erasure_coded_file method directly from HippiusClient
- result = client.store_erasure_coded_file(
+ result = await client.store_erasure_coded_file(
  file_path=file_path,
  k=k,
  m=m,
@@ -1036,16 +1114,38 @@ def handle_erasure_code_directory(
  verbose=False, # Less verbose for batch processing
  )

- # Store basic result info
+ metadata_cid = result.get("metadata_cid", "unknown")
+ publishing_status = "Not published"
+
+ # If publish flag is set, publish to the global IPFS network
+ if publish and metadata_cid != "unknown":
+ try:
+ # Publish the metadata to the global IPFS network
+ publish_result = await client.ipfs_client.publish_global(
+ metadata_cid
+ )
+ if publish_result.get("published", False):
+ publishing_status = "Published to global IPFS"
+ else:
+ publishing_status = f"Failed to publish: {publish_result.get('message', 'Unknown error')}"
+ except Exception as e:
+ publishing_status = f"Failed to publish: {str(e)}"
+
+ # Store basic result info with additional publish info
  results.append(
  {
  "file_path": file_path,
- "metadata_cid": result.get("metadata_cid", "unknown"),
+ "metadata_cid": metadata_cid,
  "success": True,
+ "published": publish
+ and publishing_status == "Published to global IPFS",
  }
  )

- print(f"Success! Metadata CID: {result.get('metadata_cid', 'unknown')}")
+ status_msg = f"Success! Metadata CID: {metadata_cid}"
+ if publish:
+ status_msg += f" ({publishing_status})"
+ print(status_msg)
  successful += 1

  except Exception as e:
@@ -1093,7 +1193,7 @@ def handle_erasure_code_directory(
  return 0 if failed == 0 else 1


- def handle_reconstruct(client, metadata_cid, output_file, verbose=True):
+ async def handle_reconstruct(client, metadata_cid, output_file, verbose=True):
  """Handle the reconstruct command for erasure-coded files"""
  # Check if zfec is installed
  try:
@@ -1112,13 +1212,12 @@ def handle_reconstruct(client, metadata_cid, output_file, verbose=True):

  try:
  # Use the reconstruct_from_erasure_code method
- result = client.reconstruct_from_erasure_code(
+ await client.reconstruct_from_erasure_code(
  metadata_cid=metadata_cid, output_file=output_file, verbose=verbose
  )

  elapsed_time = time.time() - start_time
  print(f"\nFile reconstruction completed in {elapsed_time:.2f} seconds!")
- print(f"Reconstructed file saved to: {result}")

  return 0

@@ -1385,6 +1484,200 @@ def handle_seed_phrase_status(account_name=None):
  return 0


+ def handle_account_create(client, name, encrypt=False):
+ """Handle creating a new account with a generated seed phrase"""
+ print(f"Creating new account '{name}'...")
+
+ # Get password if encryption is requested
+ password = None
+ if encrypt:
+ try:
+ password = getpass.getpass("Enter password to encrypt seed phrase: ")
+ password_confirm = getpass.getpass("Confirm password: ")
+
+ if password != password_confirm:
+ print("Error: Passwords do not match")
+ return 1
+ except KeyboardInterrupt:
+ print("\nOperation cancelled")
+ return 1
+
+ try:
+ # Create the account
+ result = client.substrate_client.create_account(
+ name, encode=encrypt, password=password
+ )
+
+ print(f"Account created successfully!")
+ print(f"Name: {result['name']}")
+ print(f"Address: {result['address']}")
+ print(f"Seed phrase: {result['mnemonic']}")
+ print()
+ print(
+ "IMPORTANT: Please write down your seed phrase and store it in a safe place."
+ )
+ print(
+ "It is the only way to recover your account if you lose access to this configuration."
+ )
+
+ return 0
+ except Exception as e:
+ print(f"Error creating account: {e}")
+ return 1
+
+
+ def handle_account_export(client, name=None, file_path=None):
+ """Handle exporting an account to a file"""
+ try:
+ # Export the account
+ exported_file = client.substrate_client.export_account(
+ account_name=name, file_path=file_path
+ )
+
+ print(f"Account exported successfully to: {exported_file}")
+ print("The exported file contains your seed phrase in plain text.")
+ print("Please keep this file secure and do not share it with anyone.")
+
+ return 0
+ except Exception as e:
+ print(f"Error exporting account: {e}")
+ return 1
+
+
+ def handle_account_import(client, file_path, encrypt=False):
+ """Handle importing an account from a file"""
+ # Get password if encryption is requested
+ password = None
+ if encrypt:
+ try:
+ password = getpass.getpass("Enter password to encrypt seed phrase: ")
+ password_confirm = getpass.getpass("Confirm password: ")
+
+ if password != password_confirm:
+ print("Error: Passwords do not match")
+ return 1
+ except KeyboardInterrupt:
+ print("\nOperation cancelled")
+ return 1
+
+ try:
+ # Import the account
+ result = client.substrate_client.import_account(
+ file_path, password=password if encrypt else None
+ )
+
+ print(f"Account imported successfully!")
+ print(f"Name: {result['name']}")
+ print(f"Address: {result['address']}")
+
+ if (
+ result.get("original_name")
+ and result.get("original_name") != result["name"]
+ ):
+ print(
+ f"Note: Original name '{result['original_name']}' was already in use, renamed to '{result['name']}'"
+ )
+
+ return 0
+ except Exception as e:
+ print(f"Error importing account: {e}")
+ return 1
+
+
+ async def handle_account_info(client, account_name=None, include_history=False):
+ """Handle showing account information"""
+ try:
+ # Get account info - properly await the async method
+ info = await client.substrate_client.get_account_info(
+ account_name, include_history=include_history
+ )
+
+ active_marker = " (active)" if info.get("is_active", False) else ""
+ encoded_status = (
+ "encrypted" if info.get("seed_phrase_encrypted", False) else "plain text"
+ )
+
+ print(f"Account: {info['name']}{active_marker}")
+ print(f"Address: {info['address']}")
+ print(f"Seed phrase: {encoded_status}")
+
+ # Show storage statistics if available
+ if "storage_stats" in info:
+ stats = info["storage_stats"]
+ if "error" in stats:
+ print(f"Storage stats: Error - {stats['error']}")
+ else:
+ print(f"Files stored: {stats['files']}")
+ print(f"Total storage: {stats['size_formatted']}")
+
+ # Show balance if available
+ if "balance" in info:
+ balance = info["balance"]
+ print("\nAccount Balance:")
+ print(f" Free: {balance['free']:.6f}")
+ print(f" Reserved: {balance['reserved']:.6f}")
+ print(f" Total: {balance['total']:.6f}")
+
+ # Show free credits if available
+ if "free_credits" in info:
+ print(f"Free credits: {info['free_credits']:.6f}")
+
+ # Show file list if requested and available
+ if include_history and "files" in info and info["files"]:
+ print(f"\nStored Files ({len(info['files'])}):")
+ for i, file in enumerate(info["files"], 1):
+ print(f" {i}. {file.get('file_name', 'Unnamed')}")
+ print(f" CID: {file.get('file_hash', 'Unknown')}")
+ print(f" Size: {file.get('size_formatted', 'Unknown')}")
+
+ return 0
+ except Exception as e:
+ print(f"Error retrieving account info: {e}")
+ return 1
+
+
+ async def handle_account_balance(client, account_name=None, watch=False, interval=5):
+ """Handle checking or watching account balance"""
+ try:
+ # Get the account address
+ if account_name:
+ address = get_account_address(account_name)
+ if not address:
+ print(f"Error: Could not find address for account '{account_name}'")
+ return 1
+ else:
+ if client.substrate_client._account_address:
+ address = client.substrate_client._account_address
+ else:
+ print("Error: No account address available")
+ return 1
+
+ if watch:
+ # Watch mode - continuous updates
+ # Note: watch_account_balance may need to be modified to be async-compatible
+ await client.substrate_client.watch_account_balance(address, interval)
+ else:
+ # One-time check
+ balance = await client.substrate_client.get_account_balance(address)
+
+ print(f"Account Balance for: {address}")
+ print(f"Free: {balance['free']:.6f}")
+ print(f"Reserved: {balance['reserved']:.6f}")
+ print(f"Frozen: {balance['frozen']:.6f}")
+ print(f"Total: {balance['total']:.6f}")
+
+ # Show raw values
+ print("\nRaw Values:")
+ print(f"Free: {balance['raw']['free']:,}")
+ print(f"Reserved: {balance['raw']['reserved']:,}")
+ print(f"Frozen: {balance['raw']['frozen']:,}")
+
+ return 0
+ except Exception as e:
+ print(f"Error checking account balance: {e}")
+ return 1
+
+
  def handle_account_list():
  """Handle listing all accounts"""
  accounts = list_accounts()
@@ -1500,6 +1793,261 @@ def handle_default_address_clear():
  return 0


+ async def handle_pinning_status(
+ client, account_address, verbose=False, show_contents=True
+ ):
+ """Handle the pinning-status command"""
+ print("Checking file pinning status...")
+ try:
+ # Get the account address we're querying
+ if account_address is None:
+ # If no address provided, first try to get from keypair (if available)
+ if (
+ hasattr(client.substrate_client, "_keypair")
+ and client.substrate_client._keypair is not None
+ ):
+ account_address = client.substrate_client._keypair.ss58_address
+ else:
+ # Try to get the default address
+ default_address = get_default_address()
+ if default_address:
+ account_address = default_address
+ else:
+ has_default = get_default_address() is not None
+ print(
+ "Error: No account address provided, and client has no keypair."
+ )
+ if has_default:
+ print(
+ "Please provide an account address with '--account_address' or the default address may be invalid."
+ )
+ else:
+ print(
+ "Please provide an account address with '--account_address' or set a default with:"
+ )
+ print(" hippius address set-default <your_account_address>")
+ return 1
+
+ storage_requests = client.substrate_client.get_pinning_status(account_address)
+
+ # Check if any storage requests were found
+ if not storage_requests:
+ print(f"No pinning requests found for account: {account_address}")
+ return 0
+
+ print(
+ f"\nFound {len(storage_requests)} pinning requests for account: {account_address}"
+ )
+ print("-" * 80)
+
+ # Format and display each storage request
+ for i, request in enumerate(storage_requests, 1):
+ try:
+ print(f"Request {i}:")
+
+ # Display CID if available
+ cid = None
+ if "cid" in request:
+ cid = request.get("cid", "Unknown")
+ print(f" CID: {cid}")
+
+ # Display file name if available
+ if "file_name" in request:
+ file_name = request.get("file_name", "Unknown")
+ print(f" File name: {file_name}")
+ elif "raw_value" in request and "file_name" in request["raw_value"]:
+ # Try to extract from raw value if it's available
+ try:
+ raw_value = request["raw_value"]
+ if isinstance(raw_value, str) and "{" in raw_value:
+ # It's a string representation of a dict, try to extract the file_name
+ if "'file_name': " in raw_value:
+ start_idx = raw_value.find("'file_name': '") + len(
+ "'file_name': '"
+ )
+ end_idx = raw_value.find("'", start_idx)
+ if start_idx > 0 and end_idx > start_idx:
+ file_name = raw_value[start_idx:end_idx]
+ print(f" File name: {file_name}")
+ except Exception:
+ pass
+
+ # Display total replicas if available
+ if "total_replicas" in request:
+ total_replicas = request.get("total_replicas", 0)
+ print(f" Total replicas: {total_replicas}")
+
+ # Display owner if available
+ if "owner" in request:
+ owner = request.get("owner", "Unknown")
+ print(f" Owner: {owner}")
+
+ # Display timestamps if available
+ if "created_at" in request:
+ created_at = request.get("created_at", 0)
+ if created_at > 0:
+ print(f" Created at block: {created_at}")
+
+ if "last_charged_at" in request:
+ last_charged_at = request.get("last_charged_at", 0)
+ if last_charged_at > 0:
+ print(f" Last charged at block: {last_charged_at}")
+
+ # Display assignment status and progress info
+ status_text = "Awaiting validator"
+ if "is_assigned" in request:
+ is_assigned = request.get("is_assigned", False)
+ if is_assigned:
+ status_text = "Assigned to miners"
+
+ # Enhanced status info
+ if "miner_ids" in request and "total_replicas" in request:
+ miner_ids = request.get("miner_ids", [])
+ total_replicas = request.get("total_replicas", 0)
+
+ if len(miner_ids) > 0:
+ if len(miner_ids) == total_replicas:
+ status_text = "Fully pinned"
+ else:
+ status_text = "Partially pinned"
+
+ print(f" Status: {status_text}")
+
+ # Display validator if available
+ if "selected_validator" in request:
+ validator = request.get("selected_validator", "")
+ if validator:
+ print(f" Selected validator: {validator}")
+
+ # Display miners if available
+ if "miner_ids" in request:
+ miner_ids = request.get("miner_ids", [])
+ if miner_ids:
+ print(f" Assigned miners: {len(miner_ids)}")
+ for miner in miner_ids[:3]: # Show first 3 miners
+ print(f" - {miner}")
+ if len(miner_ids) > 3:
+ print(f" ... and {len(miner_ids) - 3} more")
+ else:
+ print(f" Assigned miners: None")
+
+ # Calculate pinning percentage if we have total_replicas
+ if "total_replicas" in request and request["total_replicas"] > 0:
+ total_replicas = request["total_replicas"]
+ pinning_pct = (len(miner_ids) / total_replicas) * 100
+ print(
+ f" Pinning progress: {pinning_pct:.1f}% ({len(miner_ids)}/{total_replicas} miners)"
+ )
+
+ # Display raw data for debugging
+ if verbose:
+ print(" Raw data:")
+ if "raw_key" in request:
+ print(f" Key: {request['raw_key']}")
+ if "raw_value" in request:
+ print(f" Value: {request['raw_value']}")
+
+ # Try to fetch the content and determine if it's a file list by inspecting its contents
+ if show_contents and cid:
+ try:
+ print("\n Fetching contents from IPFS...")
+ # Fetch the contents from IPFS
+ file_data = await client.ipfs_client.cat(cid)
+
+ if file_data and file_data.get("is_text", False):
+ try:
+ # Try to parse as JSON
+ content_json = json.loads(
+ file_data.get("content", "{}")
+ )
+
+ # Detect if this is a file list by checking if it's a list of file objects
+ is_file_list = False
+ if (
+ isinstance(content_json, list)
+ and len(content_json) > 0
+ ):
+ # Check if it looks like a file list
+ sample_item = content_json[0]
+ if isinstance(sample_item, dict) and (
+ "cid" in sample_item
+ or "fileHash" in sample_item
+ or "filename" in sample_item
+ or "fileName" in sample_item
+ ):
+ is_file_list = True
+
+ if is_file_list:
+ # It's a file list - display the files
+ print(
+ f" Content is a file list with {len(content_json)} files:"
+ )
+ print(" " + "-" * 40)
+ for j, file_info in enumerate(content_json, 1):
+ filename = file_info.get(
+ "filename"
+ ) or file_info.get("fileName", "Unknown")
+ file_cid = file_info.get(
+ "cid"
+ ) or file_info.get("fileHash", "Unknown")
+ print(f" File {j}: {filename}")
+ print(f" CID: {file_cid}")
+
+ # Show size if available
+ if "size" in file_info:
+ size = file_info["size"]
+ size_formatted = (
+ client.format_size(size)
+ if hasattr(client, "format_size")
+ else f"{size} bytes"
+ )
+ print(f" Size: {size_formatted}")
+
+ print(" " + "-" * 40)
+ else:
+ # Not a file list, show a compact summary
+ content_type = type(content_json).__name__
+ preview = str(content_json)
+ if len(preview) > 100:
+ preview = preview[:100] + "..."
+ print(f" Content type: JSON {content_type}")
+ print(f" Content preview: {preview}")
+ except json.JSONDecodeError:
+ # Not JSON, just show text preview
+ content = file_data.get("content", "")
+ preview = (
+ content[:100] + "..."
+ if len(content) > 100
+ else content
+ )
+ print(f" Content type: Text")
+ print(f" Content preview: {preview}")
+ else:
+ # Binary data
+ content_size = len(file_data.get("content", b""))
+ size_formatted = (
+ client.format_size(content_size)
+ if hasattr(client, "format_size")
+ else f"{content_size} bytes"
+ )
+ print(f" Content type: Binary data")
+ print(f" Content size: {size_formatted}")
+ except Exception as e:
+ print(f" Error fetching file list contents: {e}")
+
+ print("-" * 80)
+ except Exception as e:
+ print(f" Error displaying request {i}: {e}")
+ print("-" * 80)
+ continue
+
+ except Exception as e:
+ print(f"Error retrieving pinning status: {e}")
+ return 1
+
+ return 0
+
+
  def main():
  """Main CLI entry point for hippius command."""
  # Set up the argument parser
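The content inspection inside handle_pinning_status boils down to one heuristic: fetched text is treated as a file list when it parses as a JSON array whose first element carries one of the known file keys. The same check as a standalone helper (the function name is illustrative):

import json

def looks_like_file_list(content: str) -> bool:
    # A file list is a JSON array of objects with cid/fileHash/filename/fileName keys.
    try:
        data = json.loads(content)
    except json.JSONDecodeError:
        return False
    if not isinstance(data, list) or not data:
        return False
    first = data[0]
    return isinstance(first, dict) and any(
        key in first for key in ("cid", "fileHash", "filename", "fileName")
    )

# Example: '[{"fileHash": "Qm...", "fileName": "photo.jpg"}]' -> True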
@@ -1532,9 +2080,15 @@ examples:
  # View all miners for stored files
  hippius files --all-miners

+ # Check file pinning status
+ hippius pinning-status
+
  # Erasure code a file (Reed-Solomon)
  hippius erasure-code large_file.mp4 --k 3 --m 5

+ # Erasure code and publish to global IPFS network
+ hippius erasure-code large_file.avi --publish
+
  # Reconstruct an erasure-coded file
  hippius reconstruct QmMetadataHash reconstructed_file.mp4
  """,
@@ -1678,6 +2232,32 @@ examples:
  )
  )

+ # Pinning status command
+ pinning_status_parser = subparsers.add_parser(
+ "pinning-status", help="Check the status of file pinning requests"
+ )
+ pinning_status_parser.add_argument(
+ "--account_address",
+ help="Substrate account to check pinning status for (defaults to your keyfile account)",
+ )
+ pinning_status_parser.add_argument(
+ "--verbose",
+ "-v",
+ action="store_true",
+ help="Show detailed debug information",
+ )
+ pinning_status_parser.add_argument(
+ "--show-contents",
+ action="store_true",
+ default=True,
+ help="Show the contents of file lists (defaults to true)",
+ )
+ pinning_status_parser.add_argument(
+ "--no-contents",
+ action="store_true",
+ help="Don't show the contents of file lists",
+ )
+
  # Erasure Coded Files command
  ec_files_parser = subparsers.add_parser(
  "ec-files", help="View erasure-coded files stored by you or another account"
@@ -1712,6 +2292,9 @@ examples:
  keygen_parser.add_argument(
  "--copy", action="store_true", help="Copy the generated key to the clipboard"
  )
+ keygen_parser.add_argument(
+ "--save", action="store_true", help="Save the key to the Hippius configuration"
+ )

  # Erasure code command
  erasure_code_parser = subparsers.add_parser(
@@ -1742,6 +2325,11 @@ examples:
  erasure_code_parser.add_argument(
  "--no-encrypt", action="store_true", help="Do not encrypt the file"
  )
+ erasure_code_parser.add_argument(
+ "--publish",
+ action="store_true",
+ help="Upload and publish the erasure-coded file to the global IPFS network",
+ )
  erasure_code_parser.add_argument(
  "--verbose", action="store_true", help="Enable verbose output", default=True
  )
@@ -1849,6 +2437,77 @@ examples:
  # List accounts
  account_subparsers.add_parser("list", help="List all accounts")

+ # Create account
+ create_account_parser = account_subparsers.add_parser(
+ "create", help="Create a new account with a generated seed phrase"
+ )
+ create_account_parser.add_argument(
+ "--name", required=True, help="Name for the new account"
+ )
+ create_account_parser.add_argument(
+ "--encrypt", action="store_true", help="Encrypt the seed phrase with a password"
+ )
+
+ # Export account
+ export_account_parser = account_subparsers.add_parser(
+ "export", help="Export an account to a file"
+ )
+ export_account_parser.add_argument(
+ "--name",
+ help="Name of the account to export (uses active account if not specified)",
+ )
+ export_account_parser.add_argument(
+ "--file",
+ help="Path to save the exported account file (auto-generated if not specified)",
+ )
+
+ # Import account
+ import_account_parser = account_subparsers.add_parser(
+ "import", help="Import an account from a file"
+ )
+ import_account_parser.add_argument(
+ "--file", required=True, help="Path to the account file to import"
+ )
+ import_account_parser.add_argument(
+ "--encrypt",
+ action="store_true",
+ help="Encrypt the imported seed phrase with a password",
+ )
+
+ # Account info
+ info_account_parser = account_subparsers.add_parser(
+ "info", help="Display detailed information about an account"
+ )
+ info_account_parser.add_argument(
+ "account_name",
+ nargs="?",
+ help="Name of the account to show (uses active account if not specified)",
+ )
+ info_account_parser.add_argument(
+ "--history", action="store_true", help="Include usage history in the output"
+ )
+
+ # Account balance
+ balance_account_parser = account_subparsers.add_parser(
+ "balance", help="Check account balance"
+ )
+ balance_account_parser.add_argument(
+ "account_name",
+ nargs="?",
+ help="Name of the account to check (uses active account if not specified)",
+ )
+ balance_account_parser.add_argument(
+ "--watch",
+ action="store_true",
+ help="Watch account balance in real-time until Ctrl+C is pressed",
+ )
+ balance_account_parser.add_argument(
+ "--interval",
+ type=int,
+ default=5,
+ help="Update interval in seconds for watch mode (default: 5)",
+ )
+
  # Switch active account
  switch_account_parser = account_subparsers.add_parser(
  "switch", help="Switch to a different account"
@@ -1897,16 +2556,6 @@ examples:
  parser.print_help()
  return 1

- # Special case for keygen which doesn't need client initialization
- if args.command == "keygen":
- # Handle key generation separately
- if args.copy:
- return key_generation_cli()
- else:
- # Create a new argparse namespace with just the copy flag for compatibility
- keygen_args = argparse.Namespace(copy=False)
- return key_generation_cli()
-
  try:
  # Parse miner IDs if provided
  miner_ids = None
@@ -1957,27 +2606,47 @@ examples:
  encryption_key=encryption_key,
  )

- # Handle commands
+ # Handle commands - separate async and sync handlers
+ # Create a helper function to handle async handlers
+ def run_async_handler(handler_func, *args, **kwargs):
+ # Check if the handler is async
+ if inspect.iscoroutinefunction(handler_func):
+ # Run the async handler in the event loop
+ return asyncio.run(handler_func(*args, **kwargs))
+ else:
+ # Run the handler directly
+ return handler_func(*args, **kwargs)
+
+ # Handle commands with the helper function
  if args.command == "download":
- return handle_download(client, args.cid, args.output_path, decrypt=decrypt)
+ return run_async_handler(
+ handle_download, client, args.cid, args.output_path, decrypt=decrypt
+ )

  elif args.command == "exists":
- return handle_exists(client, args.cid)
+ return run_async_handler(handle_exists, client, args.cid)

  elif args.command == "cat":
- return handle_cat(client, args.cid, args.max_size, decrypt=decrypt)
+ return run_async_handler(
+ handle_cat, client, args.cid, args.max_size, decrypt=decrypt
+ )

  elif args.command == "store":
- return handle_store(client, args.file_path, miner_ids, encrypt=encrypt)
+ return run_async_handler(
+ handle_store, client, args.file_path, miner_ids, encrypt=encrypt
+ )

  elif args.command == "store-dir":
- return handle_store_dir(client, args.dir_path, miner_ids, encrypt=encrypt)
+ return run_async_handler(
+ handle_store_dir, client, args.dir_path, miner_ids, encrypt=encrypt
+ )

  elif args.command == "credits":
- return handle_credits(client, args.account_address)
+ return run_async_handler(handle_credits, client, args.account_address)

  elif args.command == "files":
- return handle_files(
+ return run_async_handler(
+ handle_files,
  client,
  args.account_address,
  show_all_miners=(
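The dispatch rework above keeps main() synchronous: every command is routed through run_async_handler, which only wraps coroutine functions in asyncio.run and calls plain functions directly. The pattern in isolation, with stand-in handlers:

import asyncio
import inspect

def run_async_handler(handler_func, *args, **kwargs):
    # Async handlers get their own event loop; sync handlers are called directly.
    if inspect.iscoroutinefunction(handler_func):
        return asyncio.run(handler_func(*args, **kwargs))
    return handler_func(*args, **kwargs)

async def fake_download(cid: str) -> int:
    print(f"downloading {cid}")
    return 0

def fake_keygen() -> int:
    print("generating key")
    return 0

assert run_async_handler(fake_download, "QmExample") == 0  # wrapped in asyncio.run
assert run_async_handler(fake_keygen) == 0                 # called as-is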
@@ -1985,8 +2654,21 @@ examples:
  ),
  )

+ elif args.command == "pinning-status":
+ show_contents = (
+ not args.no_contents if hasattr(args, "no_contents") else True
+ )
+ return run_async_handler(
+ handle_pinning_status,
+ client,
+ args.account_address,
+ verbose=args.verbose,
+ show_contents=show_contents,
+ )
+
  elif args.command == "ec-files":
- return handle_ec_files(
+ return run_async_handler(
+ handle_ec_files,
  client,
  args.account_address,
  show_all_miners=(
@@ -1996,7 +2678,8 @@ examples:
  )

  elif args.command == "erasure-code":
- return handle_erasure_code(
+ return run_async_handler(
+ handle_erasure_code,
  client,
  args.file_path,
  args.k,
@@ -2004,14 +2687,34 @@ examples:
  args.chunk_size,
  miner_ids,
  encrypt=args.encrypt,
+ publish=args.publish,
  verbose=args.verbose,
  )

  elif args.command == "reconstruct":
- return handle_reconstruct(
- client, args.metadata_cid, args.output_file, verbose=args.verbose
+ return run_async_handler(
+ handle_reconstruct,
+ client,
+ args.metadata_cid,
+ args.output_file,
+ verbose=args.verbose,
  )

+ elif args.command == "keygen":
+ # Generate and save an encryption key
+ client = HippiusClient()
+ encryption_key = client.generate_encryption_key()
+ print(f"Generated encryption key: {encryption_key}")
+
+ # Save to config if requested
+ if hasattr(args, "save") and args.save:
+ print("Saving encryption key to configuration...")
+ handle_config_set("encryption", "encryption_key", encryption_key)
+ print(
+ "Encryption key saved. Files will not be automatically encrypted unless you set encryption.encrypt_by_default to true"
+ )
+ return 0
+
  elif args.command == "config":
  if args.config_action == "get":
  return handle_config_get(args.section, args.key)
@@ -2048,6 +2751,24 @@ examples:
  elif args.command == "account":
  if args.account_action == "list":
  return handle_account_list()
+ elif args.account_action == "create":
+ return handle_account_create(client, args.name, args.encrypt)
+ elif args.account_action == "export":
+ return handle_account_export(client, args.name, args.file)
+ elif args.account_action == "import":
+ return handle_account_import(client, args.file, args.encrypt)
+ elif args.account_action == "info":
+ return run_async_handler(
+ handle_account_info, client, args.account_name, args.history
+ )
+ elif args.account_action == "balance":
+ return run_async_handler(
+ handle_account_balance,
+ client,
+ args.account_name,
+ args.watch,
+ args.interval,
+ )
  elif args.account_action == "switch":
  return handle_account_switch(args.account_name)
  elif args.account_action == "delete":
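Taken together, the subcommands and flags introduced in 0.2.1 can be exercised along these lines (command shapes follow the argparse definitions in this diff; account names, file paths, and CIDs are placeholders):

  hippius pinning-status --verbose --no-contents
  hippius erasure-code large_file.avi --publish
  hippius keygen --save
  hippius account create --name mykey --encrypt
  hippius account export --name mykey --file mykey_backup
  hippius account import --file mykey_backup
  hippius account info mykey --history
  hippius account balance mykey --watch --interval 10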