hippius 0.2.8__tar.gz → 0.2.10__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {hippius-0.2.8 → hippius-0.2.10}/PKG-INFO +1 -1
- {hippius-0.2.8 → hippius-0.2.10}/hippius_sdk/__init__.py +1 -1
- {hippius-0.2.8 → hippius-0.2.10}/hippius_sdk/cli_handlers.py +1 -2
- {hippius-0.2.8 → hippius-0.2.10}/hippius_sdk/client.py +1 -1
- {hippius-0.2.8 → hippius-0.2.10}/hippius_sdk/ipfs.py +32 -113
- {hippius-0.2.8 → hippius-0.2.10}/hippius_sdk/ipfs_core.py +47 -127
- {hippius-0.2.8 → hippius-0.2.10}/pyproject.toml +1 -1
- {hippius-0.2.8 → hippius-0.2.10}/README.md +0 -0
- {hippius-0.2.8 → hippius-0.2.10}/hippius_sdk/cli.py +0 -0
- {hippius-0.2.8 → hippius-0.2.10}/hippius_sdk/cli_assets.py +0 -0
- {hippius-0.2.8 → hippius-0.2.10}/hippius_sdk/cli_parser.py +0 -0
- {hippius-0.2.8 → hippius-0.2.10}/hippius_sdk/cli_rich.py +0 -0
- {hippius-0.2.8 → hippius-0.2.10}/hippius_sdk/config.py +0 -0
- {hippius-0.2.8 → hippius-0.2.10}/hippius_sdk/errors.py +0 -0
- {hippius-0.2.8 → hippius-0.2.10}/hippius_sdk/substrate.py +0 -0
- {hippius-0.2.8 → hippius-0.2.10}/hippius_sdk/utils.py +0 -0
hippius_sdk/cli_handlers.py
@@ -1032,8 +1032,7 @@ async def handle_ec_files(
     with tempfile.NamedTemporaryFile() as temp:
         temp_path = temp.name

-
-        await client.ipfs_client.download_file(metadata_cid, temp_path)
+        await client.download_file(metadata_cid, temp_path)

         # Open and parse the metadata file
         with open(temp_path, "r") as f:
hippius_sdk/client.py
@@ -169,7 +169,7 @@ class HippiusClient:
         requests.RequestException: If the download fails
         ValueError: If decryption is requested but fails
     """
-    return await self.ipfs_client.download_file(cid, output_path,
+    return await self.ipfs_client.download_file(cid, output_path, _=decrypt)

 async def cat(
     self,
hippius_sdk/ipfs.py
@@ -441,20 +441,17 @@ class IPFSClient:
     self,
     cid: str,
     output_path: str,
-
+    _: Optional[bool] = None,
     max_retries: int = 3,
-    skip_directory_check: bool = False,
 ) -> Dict[str, Any]:
     """
     Download a file from IPFS with optional decryption.
-    Supports downloading directories - in that case, a directory structure will be created.

     Args:
         cid: Content Identifier (CID) of the file to download
-        output_path: Path where the downloaded file
-
+        output_path: Path where the downloaded file will be saved
+        _: Whether to decrypt the file (overrides default)
         max_retries: Maximum number of retry attempts (default: 3)
-        skip_directory_check: If True, skips directory check (treats as file)

     Returns:
         Dict[str, Any]: Dictionary containing download results:
@@ -464,7 +461,6 @@ class IPFSClient:
             - size_formatted: Human-readable file size
             - elapsed_seconds: Time taken for the download in seconds
             - decrypted: Whether the file was decrypted
-            - is_directory: Whether the download was a directory

     Raises:
         requests.RequestException: If the download fails
@@ -472,117 +468,40 @@ class IPFSClient:
     """
     start_time = time.time()

-
-
-    if not skip_directory_check:
-        # Use the improved ls function to properly detect directories
-        try:
-            # The ls function now properly detects directories
-            ls_result = await self.client.ls(cid)
-            is_directory = ls_result.get("is_directory", False)
-        except Exception:
-            # If ls fails, we'll proceed as if it's a file
-            pass
-
-        # If it's a directory, handle it differently
-        if is_directory:
-            # For directories, we don't need to decrypt each file during the initial download
-            # We'll use the AsyncIPFSClient's download_directory method directly
+    retries = 0
+    while retries < max_retries:
         try:
-
+            url = f"{self.gateway}/ipfs/{cid}"
+            async with self.client.client.stream(url=url, method="GET") as response:
+                response.raise_for_status()

-            # Calculate the total size of the directory
-            total_size = 0
-            for root, _, files in os.walk(output_path):
-                for file in files:
-                    file_path = os.path.join(root, file)
-                    total_size += os.path.getsize(file_path)
-
-            elapsed_time = time.time() - start_time
-
-            return {
-                "success": True,
-                "output_path": output_path,
-                "size_bytes": total_size,
-                "size_formatted": self.format_size(total_size),
-                "elapsed_seconds": round(elapsed_time, 2),
-                "decrypted": False,  # Directories aren't decrypted as a whole
-                "is_directory": True,
-            }
-        except Exception as e:
-            raise RuntimeError(f"Failed to download directory: {str(e)}")
-
-    # For regular files, use the existing logic
-    # Determine if we should decrypt
-    should_decrypt = self.encrypt_by_default if decrypt is None else decrypt
-
-    # Check if decryption is available if requested
-    if should_decrypt and not self.encryption_available:
-        raise ValueError(
-            "Decryption requested but not available. Check that PyNaCl is installed and a valid encryption key is provided."
-        )
-
-    # Create a temporary file if we'll be decrypting
-    temp_file_path = None
-    try:
-        if should_decrypt:
-            # Create a temporary file for the encrypted data
-            temp_file = tempfile.NamedTemporaryFile(delete=False)
-            temp_file_path = temp_file.name
-            temp_file.close()
-            download_path = temp_file_path
-        else:
-            download_path = output_path
-
-        # Pass the skip_directory_check parameter to the core client
-        await self.client.download_file(
-            cid, download_path, skip_directory_check=skip_directory_check
-        )
-        download_success = True
-
-        if not download_success:
-            raise RuntimeError("Failed to download file after multiple attempts")
-
-        # Decrypt if needed
-        if should_decrypt:
-            try:
-                # Read the encrypted data
-                with open(temp_file_path, "rb") as f:
-                    encrypted_data = f.read()
-
-                # Decrypt the data
-                decrypted_data = self.decrypt_data(encrypted_data)
-
-                # Write the decrypted data to the output path
-                os.makedirs(
-                    os.path.dirname(os.path.abspath(output_path)), exist_ok=True
-                )
                 with open(output_path, "wb") as f:
-
-
-
-
-
-
-
-
+                    async for chunk in response.aiter_bytes(chunk_size=8192):
+                        f.write(chunk)
+            break
+
+        except (httpx.HTTPError, IOError) as e:
+            retries += 1
+
+            if retries < max_retries:
+                wait_time = 2**retries
+                print(f"Download attempt {retries} failed: {str(e)}")
+                print(f"Retrying in {wait_time} seconds...")
+                time.sleep(wait_time)
+            else:
+                raise

-
+    file_size_bytes = os.path.getsize(output_path)
+    elapsed_time = time.time() - start_time

-
-
-
-
-
-
-
-
-            }
-
-    finally:
-        # Clean up temporary file if created
-        if temp_file_path and os.path.exists(temp_file_path):
-            os.unlink(temp_file_path)
+    return {
+        "success": True,
+        "output_path": output_path,
+        "size_bytes": file_size_bytes,
+        "size_formatted": self.format_size(file_size_bytes),
+        "elapsed_seconds": round(elapsed_time, 2),
+        "decrypted": _,
+    }

 async def cat(
     self,
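For orientation, a minimal usage sketch of the rewritten download path follows. The import path matches `hippius_sdk/ipfs.py` above; the no-argument constructor, the placeholder CID, and the output filename are assumptions made for illustration, while the coroutine name, the `max_retries` behavior, and the returned keys come from the diff itself.

```python
# Hypothetical usage of IPFSClient.download_file after this change.
# Assumes IPFSClient() works with default gateway/API settings; the CID is a placeholder.
import asyncio

from hippius_sdk.ipfs import IPFSClient


async def main() -> None:
    client = IPFSClient()
    result = await client.download_file(
        "QmExampleCid",      # placeholder CID
        "downloaded.bin",
        max_retries=3,       # failed attempts back off for 2**retries seconds
    )
    print(result["size_formatted"], result["elapsed_seconds"], result["decrypted"])


asyncio.run(main())
```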
hippius_sdk/ipfs_core.py
@@ -36,7 +36,7 @@ class AsyncIPFSClient:
         api_url = "http://localhost:5001"
     self.api_url = api_url
     self.gateway = gateway
-    self.client = httpx.AsyncClient(timeout=
+    self.client = httpx.AsyncClient(timeout=300, follow_redirects=True)

 async def close(self):
     """Close the httpx client."""
@@ -59,8 +59,14 @@ class AsyncIPFSClient:
         Dict containing the CID and other information
     """
     with open(file_path, "rb") as f:
-
-
+        file_content = f.read()
+    filename = os.path.basename(file_path)
+    # Specify file with name and content type to ensure consistent handling
+    files = {"file": (filename, file_content, "application/octet-stream")}
+    # Explicitly set wrap-with-directory=false to prevent wrapping in directory
+    response = await self.client.post(
+        f"{self.api_url}/api/v0/add?wrap-with-directory=false", files=files
+    )
     response.raise_for_status()
     return response.json()

@@ -75,8 +81,12 @@ class AsyncIPFSClient:
     Returns:
         Dict containing the CID and other information
     """
-
-
+    # Specify file with name and content type to ensure consistent handling
+    files = {"file": (filename, data, "application/octet-stream")}
+    # Explicitly set wrap-with-directory=false to prevent wrapping in directory
+    response = await self.client.post(
+        f"{self.api_url}/api/v0/add?wrap-with-directory=false", files=files
+    )
     response.raise_for_status()
     return response.json()
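Both `add_file` and `add_bytes` now post multipart content to the daemon's add endpoint with `wrap-with-directory=false`. A self-contained sketch of the equivalent raw request is below; the endpoint, query flag, and multipart field layout are taken from the diff, while the daemon address, file name, and payload are illustrative.

```python
# Standalone illustration of the upload request the SDK now issues.
# Assumes a local IPFS daemon at http://localhost:5001; payload and name are examples.
import asyncio

import httpx


async def add_bytes(data: bytes, filename: str) -> dict:
    files = {"file": (filename, data, "application/octet-stream")}
    async with httpx.AsyncClient(timeout=300) as client:
        response = await client.post(
            "http://localhost:5001/api/v0/add?wrap-with-directory=false",
            files=files,
        )
        response.raise_for_status()
        return response.json()  # Kubo returns Name/Hash/Size; Hash is the CID


print(asyncio.run(add_bytes(b"hello hippius", "hello.txt")))
```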
|
@@ -149,59 +159,30 @@ class AsyncIPFSClient:
         cid: Content Identifier

     Returns:
-        Dict with links information and
+        Dict with links information and is_directory flag
     """
-
-
-
-
-    result = response.json()
-
-    # Add a flag to indicate if this is a directory
-    # A directory has Links and typically more than one or has Type=1
-    is_directory = False
-    if "Objects" in result and len(result["Objects"]) > 0:
-        obj = result["Objects"][0]
-        if "Links" in obj and len(obj["Links"]) > 0:
-            # It has links, likely a directory
-            is_directory = True
-            # Check if any links have Type=1 (directory)
-            for link in obj["Links"]:
-                if link.get("Type") == 1:
-                    is_directory = True
-                    break
-
-    # Add the flag to the result
-    result["is_directory"] = is_directory
-    return result
+    # Try using the direct IPFS API first (most reliable)
+    response = await self.client.post(f"{self.api_url}/api/v0/ls?arg={cid}")
+    response.raise_for_status()
+    result = response.json()

-
-
-
-
-
-
-
-
-            if (
-
-
-
-
-
-
-
-
-                "Objects": [
-                    {
-                        "Hash": cid,
-                        "Links": [],  # We can't get links from HTML content easily
-                    }
-                ],
-            }
-        except Exception as fallback_error:
-            # Re-raise the original error
-            raise e
+    # Add a flag to indicate if this is a directory.
+    # A directory has Links and typically more than one or has Type=1
+    is_directory = False
+    if "Objects" in result and len(result["Objects"]) > 0:
+        obj = result["Objects"][0]
+        if "Links" in obj and len(obj["Links"]) > 0:
+            # It has links, likely a directory
+            is_directory = True
+            # Check if any links have Type=1 (directory)
+            for link in obj["Links"]:
+                if link.get("Type") == 1:
+                    is_directory = True
+                    break
+
+    # Add the flag to the result
+    result["is_directory"] = is_directory
+    return result

 async def exists(self, cid: str) -> bool:
     """
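The reworked `ls` now always queries `/api/v0/ls` directly and annotates the parsed JSON with an `is_directory` flag instead of falling back to scraping gateway HTML. For a directory CID, the returned dict would look roughly like the sketch below; `Objects`, `Links`, and `Type` follow Kubo's ls response format, and the concrete hashes, names, and sizes are made up.

```python
# Illustrative shape of AsyncIPFSClient.ls() output for a directory CID.
# Field names follow the IPFS /api/v0/ls response; all values are placeholders.
example_ls_result = {
    "Objects": [
        {
            "Hash": "QmExampleDirCid",
            "Links": [
                {"Name": "a.txt", "Hash": "QmExampleFileCid", "Size": 12, "Type": 2},
                {"Name": "sub", "Hash": "QmExampleSubDirCid", "Size": 0, "Type": 1},
            ],
        }
    ],
    # Added by the SDK: True when the object has links or any link has Type == 1.
    "is_directory": True,
}
```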
@@ -241,7 +222,7 @@ class AsyncIPFSClient:
     ls_result = await self.ls(cid)
     if ls_result.get("is_directory", False):
         # It's a directory, use the get command to download it properly
-        return await self.
+        return await self.download_directory(cid, output_path)
 except Exception:
     # If ls check fails, continue with regular file download
     pass
@@ -259,80 +240,13 @@ class AsyncIPFSClient:
     # Only try directory fallback if not skipping directory check
     if not skip_directory_check:
         try:
-            return await self.
+            return await self.download_directory(cid, output_path)
         except Exception:
             pass
     # Raise the original error
     raise e

 async def download_directory(self, cid: str, output_path: str) -> str:
-    """
-    Download a directory from IPFS.
-
-    Args:
-        cid: Content identifier of the directory
-        output_path: Path where to save the directory
-
-    Returns:
-        Path to the saved directory
-    """
-    # Try the more reliable get command first
-    try:
-        return await self.download_directory_with_get(cid, output_path)
-    except Exception as e:
-        # If get command fails, fall back to ls/cat method
-        try:
-            # Get directory listing
-            ls_result = await self.ls(cid)
-            if not ls_result.get("is_directory", False):
-                raise ValueError(f"CID {cid} is not a directory")
-
-            # Create the directory if it doesn't exist
-            os.makedirs(output_path, exist_ok=True)
-
-            # Extract links from the updated response format
-            links = []
-            # The ls result format is: { "Objects": [ { "Hash": "...", "Links": [...] } ] }
-            if "Objects" in ls_result and len(ls_result["Objects"]) > 0:
-                for obj in ls_result["Objects"]:
-                    if "Links" in obj:
-                        links.extend(obj["Links"])
-
-            # Download each file in the directory
-            for link in links:
-                file_name = link.get("Name")
-                file_cid = link.get("Hash")
-                file_type = link.get("Type")
-
-                # Skip entries without required data
-                if not (file_name and file_cid):
-                    continue
-
-                # Build the path for this file/directory
-                file_path = os.path.join(output_path, file_name)
-
-                if file_type == 1 or file_type == "dir":  # Directory type
-                    # Recursively download the subdirectory
-                    await self.download_directory(file_cid, file_path)
-                else:  # File type
-                    # Download the file
-                    content = await self.cat(file_cid)
-                    os.makedirs(
-                        os.path.dirname(os.path.abspath(file_path)), exist_ok=True
-                    )
-                    with open(file_path, "wb") as f:
-                        f.write(content)
-
-            return output_path
-        except Exception as fallback_error:
-            # If both methods fail, raise a more detailed error
-            raise RuntimeError(
-                f"Failed to download directory: get error: {e}, ls/cat error: {fallback_error}"
-            )
-
-    return output_path
-
-async def download_directory_with_get(self, cid: str, output_path: str) -> str:
     """
     Download a directory from IPFS by recursively fetching its contents.

@@ -345,6 +259,13 @@ class AsyncIPFSClient:
     """
     # First, get the directory listing to find all contents
     try:
+        import uuid
+
+        # Handle potential file/directory collision
+        if os.path.exists(output_path) and not os.path.isdir(output_path):
+            # Generate unique path by adding a UUID suffix
+            output_path = f"{output_path}_{str(uuid.uuid4())[:8]}"
+
         ls_result = await self.ls(cid)

         # Create target directory
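The collision guard added to `download_directory` only renames the target when a regular file already occupies the path. An isolated sketch of that behavior, with an example path:

```python
# Isolated sketch of the collision handling added above; the path is an example.
import os
import uuid


def resolve_output_path(output_path: str) -> str:
    # If a regular file already sits at the target path, append a short UUID
    # suffix so the directory download does not overwrite it.
    if os.path.exists(output_path) and not os.path.isdir(output_path):
        return f"{output_path}_{str(uuid.uuid4())[:8]}"
    return output_path


print(resolve_output_path("downloads/result"))
```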
@@ -362,7 +283,6 @@ class AsyncIPFSClient:
     link_name = link.get("Name")
     link_hash = link.get("Hash")
     link_type = link.get("Type")
-    link_size = link.get("Size", 0)

     if not (link_name and link_hash):
         continue  # Skip if missing essential data
@@ -372,7 +292,7 @@ class AsyncIPFSClient:

     if link_type == 1 or str(link_type) == "1" or link_type == "dir":
         # It's a directory - recursively download
-        await self.
+        await self.download_directory(link_hash, target_path)
     else:
         # It's a file - download it
         try:
|