ratio1 3.4.101__py3-none-any.whl → 3.4.103__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ratio1/_ver.py CHANGED
@@ -1,4 +1,4 @@
1
- __VER__ = "3.4.101"
1
+ __VER__ = "3.4.103"
2
2
 
3
3
  if __name__ == "__main__":
4
4
  with open("pyproject.toml", "rt") as fd:
ratio1/const/evm_net.py CHANGED
@@ -23,6 +23,9 @@ class EvmNetData:
23
23
  EE_ORACLE_API_URL_KEY = 'EE_ORACLE_API_URL'
24
24
  EE_DEEPLOY_API_URL_KEY = 'EE_DEEPLOY_API_URL'
25
25
  EE_DAPP_API_URL_KEY = 'EE_DAPP_API_URL_KEY'
26
+ EE_DAPP_APP_URL_KEY = 'EE_DAPP_APP_URL'
27
+ EE_EXPLORER_APP_URL_KEY = 'EE_EXPLORER_APP_URL'
28
+ EE_DEEPLOY_APP_URL_KEY = 'EE_DEEPLOY_APP_URL'
26
29
 
27
30
  # endclass EvmNetData
28
31
 
@@ -528,7 +531,7 @@ EVM_NET_DATA = {
528
531
  EvmNetData.DAUTH_MND_ADDR_KEY : "0x0C431e546371C87354714Fcc1a13365391A549E2",
529
532
  EvmNetData.DAUTH_PROXYAPI_ADDR_KEY : "0xa2fDD4c7E93790Ff68a01f01AA789D619F12c6AC",
530
533
  EvmNetData.DAUTH_POAI_MANAGER_ADDR_KEY : "0xa8d7FFCE91a888872A9f5431B4Dd6c0c135055c1",
531
- EvmNetData.DAUTH_RPC_KEY : "https://base-mainnet.public.blastapi.io",
534
+ EvmNetData.DAUTH_RPC_KEY : "https://base-rpc.publicnode.com",
532
535
  EvmNetData.EE_GENESIS_EPOCH_DATE_KEY : "2025-05-23 16:00:00",
533
536
  EvmNetData.EE_EPOCH_INTERVALS_KEY : 24,
534
537
  EvmNetData.EE_EPOCH_INTERVAL_SECONDS_KEY : 3600,
@@ -536,6 +539,9 @@ EVM_NET_DATA = {
536
539
  EvmNetData.EE_ORACLE_API_URL_KEY : "https://oracle.ratio1.ai",
537
540
  EvmNetData.EE_DEEPLOY_API_URL_KEY : "https://deeploy-api.ratio1.ai",
538
541
  EvmNetData.EE_DAPP_API_URL_KEY : "https://dapp-api.ratio1.ai",
542
+ EvmNetData.EE_DAPP_APP_URL_KEY : "https://app.ratio1.ai",
543
+ EvmNetData.EE_EXPLORER_APP_URL_KEY : "https://explorer.ratio1.ai",
544
+ EvmNetData.EE_DEEPLOY_APP_URL_KEY : "https://deeploy.ratio1.ai",
539
545
  EvmNetData.DAUTH_GET_ORACLES_ABI : _DAUTH_ABI_GET_ORACLES,
540
546
  },
541
547
 
@@ -555,6 +561,9 @@ EVM_NET_DATA = {
555
561
  EvmNetData.EE_ORACLE_API_URL_KEY : "https://testnet-oracle.ratio1.ai",
556
562
  EvmNetData.EE_DEEPLOY_API_URL_KEY : "https://testnet-deeploy-api.ratio1.ai",
557
563
  EvmNetData.EE_DAPP_API_URL_KEY : "https://testnet-dapp-api.ratio1.ai",
564
+ EvmNetData.EE_DAPP_APP_URL_KEY : "https://testnet-app.ratio1.ai",
565
+ EvmNetData.EE_EXPLORER_APP_URL_KEY : "https://testnet-explorer.ratio1.ai",
566
+ EvmNetData.EE_DEEPLOY_APP_URL_KEY : "https://testnet-deeploy.ratio1.ai",
558
567
  EvmNetData.DAUTH_GET_ORACLES_ABI : _DAUTH_ABI_GET_ORACLES,
559
568
  },
560
569
 
@@ -575,6 +584,9 @@ EVM_NET_DATA = {
575
584
  EvmNetData.EE_ORACLE_API_URL_KEY : "https://devnet-oracle.ratio1.ai",
576
585
  EvmNetData.EE_DEEPLOY_API_URL_KEY : "https://devnet-deeploy-api.ratio1.ai",
577
586
  EvmNetData.EE_DAPP_API_URL_KEY : "https://devnet-dapp-api.ratio1.ai",
587
+ EvmNetData.EE_DAPP_APP_URL_KEY : "https://devnet-app.ratio1.ai",
588
+ EvmNetData.EE_EXPLORER_APP_URL_KEY : "https://devnet-explorer.ratio1.ai",
589
+ EvmNetData.EE_DEEPLOY_APP_URL_KEY : "https://devnet-deeploy.ratio1.ai",
578
590
  EvmNetData.DAUTH_GET_ORACLES_ABI : _DAUTH_ABI_GET_ORACLES,
579
591
  },
580
592
  }
ratio1/ipfs/r1fs.py CHANGED
@@ -922,9 +922,9 @@ class R1FSEngine:
922
922
  if not os.path.isfile(file_path):
923
923
  raise FileNotFoundError(f"File not found: {file_path}")
924
924
 
925
- # Check file size and throw an error if larger than 2 GB.
925
+ # Check file size and throw an error if larger than 5 GB.
926
926
  file_size = os.path.getsize(file_path)
927
- if file_size > 2 * 1024 * 1024 * 1024:
927
+ if file_size > 5 * 1024 * 1024 * 1024:
928
928
  raise ValueError(f"File {file_path} is too large ({file_size} bytes). Maximum allowed size is 2 GB.")
929
929
 
930
930
  key = self._hash_secret(secret) # mandatory passphrase
@@ -1522,6 +1522,589 @@ class R1FSEngine:
1522
1522
  result = False
1523
1523
  return result
1524
1524
 
1525
+
1526
+ ########## START DELETE METHODS ##########
1527
+ if True:
1528
def is_pinned(self, cid: str, show_logs: bool = False) -> bool:
  """
  Return True when `cid` is recursively pinned on the local IPFS node.

  Parameters
  ----------
  cid : str
    The CID to check.

  show_logs : bool, optional
    Whether to show logs. Default is False.

  Returns
  -------
  bool
    True if the CID is pinned, False otherwise (also False when the
    pin listing itself fails).

  Examples
  --------
  >>> if engine.is_pinned("QmHash123..."):
  >>>   print("File is pinned")
  """
  listing_cmd = ["ipfs", "pin", "ls", "--type=recursive", cid]
  try:
    pin_listing = self.__run_command(
      listing_cmd,
      raise_on_error=False,
      show_logs=False
    )
    pinned = cid in pin_listing
    if show_logs:
      self.Pd(f"CID {cid} pinned: {pinned}")
    return pinned
  except Exception as e:
    # Treat any failure of the listing command as "not pinned".
    if show_logs:
      self.Pd(f"Error checking if CID {cid} is pinned: {e}")
    return False
1564
+
1565
def unpin_file(
  self,
  cid: str,
  unpin_remote: bool = True,
  show_logs: bool = True,
  raise_on_error: bool = False
) -> bool:
  """
  Unpin a file from R1FS locally and optionally on the relay.

  Unpinning only marks the content as garbage-collectable; disk space is
  reclaimed later by IPFS GC, not immediately.

  Parameters
  ----------
  cid : str
    The CID to unpin.

  unpin_remote : bool, optional
    Whether to also unpin from the relay. Default is True.

  show_logs : bool, optional
    Whether to show logs. Default is True.

  raise_on_error : bool, optional
    If True, raise an Exception on errors. Otherwise logs them. Default is False.

  Returns
  -------
  bool
    True if unpinning was successful, False otherwise.

  Examples
  --------
  >>> # Unpin locally and on relay
  >>> engine.unpin_file("QmHash123...")

  >>> # Unpin only locally
  >>> engine.unpin_file("QmHash123...", unpin_remote=False)
  """
  if cid in [None, ""]:
    msg = "CID parameter cannot be None or empty"
    if raise_on_error:
      raise ValueError(msg)
    if show_logs:
      self.P(msg, color='r')
    return False

  success = True

  # Unpin locally first; a local failure aborts the whole operation.
  try:
    self.__run_command(
      ["ipfs", "pin", "rm", cid],
      raise_on_error=raise_on_error,
      show_logs=False
    )
    if show_logs:
      self.Pd(f"Unpinned CID locally: {cid}")
  except Exception as e:
    msg = f"Error unpinning CID {cid} locally: {e}"
    if raise_on_error:
      # Chain the original error for a complete traceback.
      raise RuntimeError(msg) from e
    if show_logs:
      self.P(msg, color='r')
    return False

  # Then notify the relay to unpin (mirrors add_file behavior).
  if unpin_remote and self.__ipfs_relay_api is not None:
    try:
      request_url = f"{self.__ipfs_relay_api}/api/v0/pin/rm?arg={cid}"
      response = requests.post(
        request_url,
        auth=HTTPBasicAuth(self.__ipfs_api_key_username, self.__ipfs_api_key_password),
        verify=self.__ipfs_certificate_path
      )
      if response.status_code == 200:
        if show_logs:
          self.Pd(f"Relay successfully notified to unpin CID={cid}")
      else:
        msg = f"Failed to notify relay to unpin CID {cid}: {response.text}"
        if raise_on_error:
          raise RuntimeError(msg)
        if show_logs:
          self.P(msg, color='r')
        success = False
      # end if response status code
    except requests.RequestException as e:
      msg = f"Error notifying relay to unpin CID {cid}: {e}"
      if raise_on_error:
        raise RuntimeError(msg) from e
      if show_logs:
        self.P(msg, color='r')
      success = False
    # end try
  # end if relay API exists

  # Drop the CID from local tracking only on full success.
  if success:
    self.__uploaded_files.pop(cid, None)
    self.__downloaded_files.pop(cid, None)

  return success
1674
+
1675
def delete_file(
  self,
  cid: str,
  unpin_remote: bool = True,
  run_gc: bool = False,
  cleanup_local_files: bool = False,
  show_logs: bool = True,
  raise_on_error: bool = False
) -> bool:
  """
  Delete a file from R1FS by unpinning it, with optional local cleanup
  and optional immediate garbage collection.

  Note: Unpinning removes the file from your local node and (optionally)
  the relay; if other nodes have pinned this content, it may remain
  accessible on the network.

  Parameters
  ----------
  cid : str
    The CID to delete.

  unpin_remote : bool, optional
    Whether to also unpin from the relay. Default is True.
    Set to False if you only want to free local storage.

  run_gc : bool, optional
    Run garbage collection immediately after unpinning. Default is False
    (the daemon already collects automatically via --enable-gc).

  cleanup_local_files : bool, optional
    Whether to also remove downloaded files from the downloads directory.
    Default is False.

  show_logs : bool, optional
    Whether to show logs. Default is True.

  raise_on_error : bool, optional
    If True, raise an Exception on errors. Otherwise logs them. Default is False.

  Returns
  -------
  bool
    True if deletion was successful, False otherwise.

  Examples
  --------
  >>> engine.delete_file("QmHash123...")
  >>> engine.delete_file("QmHash123...", run_gc=True)
  >>> engine.delete_file("QmHash123...", unpin_remote=False)
  >>> engine.delete_file("QmHash123...", run_gc=True, cleanup_local_files=True)
  """
  if cid in [None, ""]:
    msg = "CID parameter cannot be None or empty"
    if raise_on_error:
      raise ValueError(msg)
    if show_logs:
      self.P(msg, color='r')
    return False

  try:
    # Step 1: unpin (locally, and on relay when requested).
    unpinned = self.unpin_file(
      cid=cid,
      unpin_remote=unpin_remote,
      show_logs=show_logs,
      raise_on_error=raise_on_error
    )
    if not unpinned:
      return False

    # Step 2 (optional): remove the downloaded copy.
    if cleanup_local_files:
      local_folder = os.path.join(self.__downloads_dir, cid)
      if os.path.exists(local_folder):
        try:
          remover = shutil.rmtree if os.path.isdir(local_folder) else os.remove
          remover(local_folder)
          if show_logs:
            self.Pd(f"Removed local files for CID {cid}")
        except Exception as e:
          # Local cleanup is best-effort; the unpin already succeeded.
          if show_logs:
            self.P(f"Error removing local files for CID {cid}: {e}", color='r')

    # Step 3 (optional): collect garbage right away.
    if run_gc:
      if show_logs:
        self.Pd("Running garbage collection...")
      try:
        self.__run_command(
          ["ipfs", "repo", "gc"],
          raise_on_error=False,
          show_logs=False  # GC output can be very verbose
        )
        if show_logs:
          self.P(f"Deleted file {cid} and ran garbage collection", color='g')
      except Exception as e:
        # A GC failure does not invalidate the successful unpin.
        msg = f"Error running garbage collection: {e}"
        if show_logs:
          self.P(msg, color='r')
    elif show_logs:
      location = "local + relay" if unpin_remote else "local only"
      self.P(f"Unpinned file {cid} ({location})", color='g')

    return True

  except Exception as e:
    msg = f"Error deleting file {cid}: {e}"
    if raise_on_error:
      raise RuntimeError(msg)
    if show_logs:
      self.P(msg, color='r')
    return False
1804
+
1805
def delete_files(
  self,
  cids: list,
  unpin_remote: bool = True,
  run_gc_after_all: bool = True,
  cleanup_local_files: bool = False,
  show_logs: bool = True,
  raise_on_error: bool = False,
  continue_on_error: bool = True
) -> dict:
  """
  Delete multiple files from R1FS in bulk.

  More efficient than calling `delete_file` repeatedly because garbage
  collection runs at most once, after all unpins.

  Parameters
  ----------
  cids : list
    List of CIDs to delete.

  unpin_remote : bool, optional
    Whether to also unpin from the relay. Default is True.

  run_gc_after_all : bool, optional
    Run garbage collection once after all deletions. Default is True.

  cleanup_local_files : bool, optional
    Whether to also remove local downloaded files. Default is False.

  show_logs : bool, optional
    Whether to show logs. Default is True.

  raise_on_error : bool, optional
    If True, raise an Exception on first error. Default is False.

  continue_on_error : bool, optional
    Keep deleting remaining files if one fails (only used when
    raise_on_error is False). Default is True.

  Returns
  -------
  dict
    {'success': [...], 'failed': [...], 'total': int,
     'success_count': int, 'failed_count': int}
    The counts are always consistent with the lists.

  Examples
  --------
  >>> result = engine.delete_files(["QmHash1...", "QmHash2..."])
  >>> print(f"Deleted {result['success_count']} of {result['total']} files")
  """
  def _summary(ok, bad, total):
    # Single source of truth for the result dict so the counts always
    # agree with the lists (a previous version could return
    # failed_count=0 alongside a non-empty 'failed' list).
    return {
      'success': ok,
      'failed': bad,
      'total': total,
      'success_count': len(ok),
      'failed_count': len(bad),
    }

  if not isinstance(cids, list):
    msg = "cids parameter must be a list"
    if raise_on_error:
      raise ValueError(msg)
    if show_logs:
      self.P(msg, color='r')
    # Report the rejected input as one failed item, with consistent counts.
    return _summary([], [cids], 1)

  if len(cids) == 0:
    if show_logs:
      self.Pd("No CIDs provided for deletion")
    return _summary([], [], 0)

  if show_logs:
    self.P(f"Deleting {len(cids)} files from R1FS...", color='m')

  success_list = []
  failed_list = []

  for i, cid in enumerate(cids):
    if show_logs:
      self.Pd(f"Processing {i+1}/{len(cids)}: {cid}")
    try:
      # GC is deferred to a single pass after the loop.
      deleted = self.delete_file(
        cid=cid,
        unpin_remote=unpin_remote,
        run_gc=False,
        cleanup_local_files=cleanup_local_files,
        show_logs=False,  # reduce log spam; a summary is printed below
        raise_on_error=raise_on_error
      )
      if deleted:
        success_list.append(cid)
      else:
        failed_list.append(cid)
        if not continue_on_error:
          break
    except Exception as e:
      failed_list.append(cid)
      if show_logs:
        self.P(f"Error deleting CID {cid}: {e}", color='r')
      if raise_on_error:
        raise
      if not continue_on_error:
        break

  # One GC pass for everything that was unpinned (more efficient).
  if run_gc_after_all and len(success_list) > 0:
    if show_logs:
      self.P("Running garbage collection for all deleted files...", color='m')
    try:
      self.__run_command(
        ["ipfs", "repo", "gc"],
        raise_on_error=False,
        show_logs=False
      )
      if show_logs:
        self.Pd("Garbage collection completed")
    except Exception as e:
      # GC failure does not invalidate the unpins.
      if show_logs:
        self.P(f"Warning: Garbage collection failed: {e}", color='r')

  result = _summary(success_list, failed_list, len(cids))

  if show_logs:
    location = "local + relay" if unpin_remote else "local only"
    self.P(
      f"Bulk delete completed: {result['success_count']}/{result['total']} succeeded ({location})",
      color='g' if result['failed_count'] == 0 else 'y'
    )
    if result['failed_count'] > 0:
      self.P(f"Failed to delete {result['failed_count']} files", color='r')

  return result
1968
+
1969
def garbage_collect(self, show_logs: bool = True, raise_on_error: bool = False) -> bool:
  """
  Run IPFS garbage collection now to reclaim space from unpinned blocks.

  Note: The IPFS daemon runs with the --enable-gc flag, so GC also happens
  automatically; call this only when immediate reclamation is needed.

  Parameters
  ----------
  show_logs : bool, optional
    Whether to show logs. Default is True.

  raise_on_error : bool, optional
    If True, raise an Exception on errors. Default is False.

  Returns
  -------
  bool
    True if garbage collection succeeded, False otherwise.

  Examples
  --------
  >>> engine.garbage_collect()
  """
  try:
    if show_logs:
      self.P("Running IPFS garbage collection...", color='m')

    gc_output = self.__run_command(
      ["ipfs", "repo", "gc"],
      raise_on_error=raise_on_error,
      show_logs=False  # the per-block output is too verbose for the log
    )

    if show_logs:
      # Each non-empty output line roughly corresponds to one freed block.
      output_lines = gc_output.strip().split('\n') if gc_output else []
      blocks_removed = len([ln for ln in output_lines if ln.strip()])
      self.P(f"Garbage collection completed (~{blocks_removed} blocks processed)", color='g')

    return True

  except Exception as e:
    msg = f"Error running garbage collection: {e}"
    if raise_on_error:
      raise RuntimeError(msg)
    if show_logs:
      self.P(msg, color='r')
    return False
2020
+
2021
def cleanup_downloads(
  self,
  cid: str = None,
  show_logs: bool = True
) -> int:
  """
  Remove locally downloaded copies from the downloads directory.

  This does NOT unpin anything from IPFS — it only deletes the local
  file copies (and their tracking entries).

  Parameters
  ----------
  cid : str, optional
    Specific CID to clean up. If None, cleans all downloads.

  show_logs : bool, optional
    Whether to show logs. Default is True.

  Returns
  -------
  int
    Number of items cleaned up.

  Examples
  --------
  >>> count = engine.cleanup_downloads()
  >>> engine.cleanup_downloads(cid="QmHash123...")
  """
  def _remove(path):
    # Delete either a plain file or a whole directory tree.
    if os.path.isdir(path):
      shutil.rmtree(path)
    else:
      os.remove(path)

  removed = 0
  try:
    downloads_root = self.__downloads_dir
    if not os.path.exists(downloads_root):
      if show_logs:
        self.Pd(f"Downloads directory does not exist: {self.__downloads_dir}")
      return 0

    if cid is not None:
      # Targeted cleanup for a single CID.
      target_path = os.path.join(downloads_root, cid)
      if not os.path.exists(target_path):
        if show_logs:
          self.Pd(f"No local files found for CID: {cid}")
      else:
        try:
          _remove(target_path)
          removed = 1
          if show_logs:
            self.P(f"Cleaned up local files for CID: {cid}", color='g')
          # Drop the tracking entry for this CID.
          self.__downloaded_files.pop(cid, None)
        except Exception as e:
          if show_logs:
            self.P(f"Error cleaning up CID {cid}: {e}", color='r')
    else:
      # Wipe everything under the downloads directory.
      for item in os.listdir(downloads_root):
        item_path = os.path.join(downloads_root, item)
        try:
          _remove(item_path)
          removed += 1
        except Exception as e:
          if show_logs:
            self.P(f"Error removing {item_path}: {e}", color='r')
      if show_logs:
        self.P(f"Cleaned up {removed} items from downloads directory", color='g')
      # All local copies are gone, so clear all tracking.
      self.__downloaded_files.clear()

  except Exception as e:
    if show_logs:
      self.P(f"Error during cleanup: {e}", color='r')

  return removed
2105
+
2106
+ ######## END DELETE METHODS #########
2107
+
1525
2108
  def calculate_file_cid(
1526
2109
  self,
1527
2110
  file_path: str,
@@ -1,6 +1,7 @@
1
1
  import os
2
2
  import bz2
3
3
  import pickle
4
+ import tempfile
4
5
  from time import time
5
6
 
6
7
  class _PickleSerializationMixin(object):
@@ -55,12 +56,41 @@ class _PickleSerializationMixin(object):
55
56
  return myobj
56
57
 
57
58
 
58
- def save_pickle(self, data, fn, folder=None,
59
- use_prefix=False, verbose=True,
60
- compressed=False,
61
- subfolder_path=None,
62
- locking=True,
63
- ):
59
+ def _fsync_dir(self, dirpath: str):
60
+ """Best-effort directory fsync for durable rename; silently ignore if unsupported."""
61
+ try:
62
+ # Linux: O_DIRECTORY available; on other OSes this may fail -> we ignore.
63
+ dir_fd = os.open(dirpath, getattr(os, "O_DIRECTORY", 0))
64
+ try:
65
+ os.fsync(dir_fd)
66
+ finally:
67
+ os.close(dir_fd)
68
+ except Exception:
69
+ # Not all platforms/filesystems support directory fsync; ignore if it fails.
70
+ pass
71
+ # enddef
72
+
73
+ def _fsync_file(self, filepath: str):
74
+ """Best-effort file fsync for durability; silently ignore if unsupported."""
75
+ # Ensure file contents hit disk even if helper didn't fsync
76
+ try:
77
+ rd_fd = os.open(filepath, os.O_RDONLY)
78
+ try:
79
+ os.fsync(rd_fd)
80
+ finally:
81
+ os.close(rd_fd)
82
+ except Exception:
83
+ # Best effort; continue to replace
84
+ pass
85
+ # enddef
86
+
87
+ def save_pickle(
88
+ self, data, fn, folder=None,
89
+ use_prefix=False, verbose=True,
90
+ compressed=False,
91
+ subfolder_path=None,
92
+ locking=True,
93
+ ):
64
94
  """
65
95
  compressed: True if compression is required OR you can just add '.pklz' to `fn`
66
96
  """
@@ -69,7 +99,6 @@ class _PickleSerializationMixin(object):
69
99
  if verbose:
70
100
  self.P(s)
71
101
  return
72
-
73
102
  # enddef
74
103
 
75
104
  lfld = self.get_target_folder(target=folder)
@@ -87,36 +116,62 @@ class _PickleSerializationMixin(object):
87
116
  datafile = os.path.join(datafolder, fn)
88
117
 
89
118
  os.makedirs(os.path.split(datafile)[0], exist_ok=True)
119
+ target_dir = os.path.dirname(datafile)
90
120
 
91
121
  tm_start = time()
92
122
  tm_elapsed = None
93
123
  err_msg = None
94
- if compressed or '.pklz' in fn:
95
- if not compressed:
96
- P("Saving pickle with compression=True forced due to extension")
97
- else:
98
- P("Saving pickle with compression...")
99
- if self._save_compressed_pickle(datafile, myobj=data, locking=locking):
100
- tm_elapsed = time() - tm_start
101
- P(" Compressed pickle {} saved in {} folder in {:.1f}s".format(fn, folder, tm_elapsed))
102
- else:
103
- P(" FAILED compressed pickle save!")
104
- else:
105
- P("Saving uncompressed pikle (lock:{}) : {} ".format(locking, datafile))
106
- with self.managed_lock_resource(datafile, condition=locking):
107
- try:
108
- with open(datafile, 'wb') as fhandle:
109
- pickle.dump(data, fhandle, protocol=pickle.HIGHEST_PROTOCOL)
124
+
125
+ # Create a temp file in the SAME directory so os.replace is atomic
126
+ tmp_fd, tmp_path = tempfile.mkstemp(prefix=os.path.basename(fn) + ".", suffix=".tmp", dir=target_dir)
127
+ os.close(tmp_fd) # we'll reopen with Python I/O or let helper write to it
128
+
129
+ try:
130
+ if compressed or '.pklz' in fn:
131
+ if not compressed:
132
+ P("Saving pickle with compression=True forced due to extension")
133
+ else:
134
+ P("Saving pickle with compression...")
135
+
136
+ ok = self._save_compressed_pickle(tmp_path, myobj=data, locking=locking)
137
+ if ok:
138
+ # Ensure data is flushed to disk before rename
139
+ self._fsync_file(tmp_path)
140
+ os.replace(tmp_path, datafile) # atomic move
141
+ self._fsync_dir(target_dir)
110
142
  tm_elapsed = time() - tm_start
111
- except Exception as e:
112
- err_msg = e
113
- if tm_elapsed is not None:
114
- if verbose:
115
- P(" Saved pickle '{}' in '{}' folder in {:.1f}s".format(fn, folder, tm_elapsed))
143
+ P(" Compressed pickle {} saved in {} folder in {:.1f}s".format(fn, folder, tm_elapsed))
144
+ else:
145
+ P(" FAILED compressed pickle save!")
116
146
  else:
117
- # maybe show this only if verbose?
118
- P(f" FAILED pickle save! Error: {err_msg}")
119
- # endif compressed or not
147
+ P("Saving uncompressed pikle (lock:{}) : {} ".format(locking, datafile))
148
+ with self.managed_lock_resource(datafile, condition=locking):
149
+ try:
150
+ with open(tmp_path, 'wb') as fhandle:
151
+ pickle.dump(data, fhandle, protocol=pickle.HIGHEST_PROTOCOL)
152
+ fhandle.flush()
153
+ os.fsync(fhandle.fileno()) # ensure data is written to disk
154
+ # Atomic replace + dir fsync
155
+ os.replace(tmp_path, datafile)
156
+ self._fsync_dir(target_dir)
157
+ tm_elapsed = time() - tm_start
158
+ except Exception as e:
159
+ err_msg = e
160
+ if tm_elapsed is not None:
161
+ if verbose:
162
+ P(" Saved pickle '{}' in '{}' folder in {:.1f}s".format(fn, folder, tm_elapsed))
163
+ else:
164
+ # maybe show this only if verbose?
165
+ P(f" FAILED pickle save! Error: {err_msg}")
166
+ # endif compressed or not
167
+ except Exception as e:
168
+ err_msg = e
169
+ try:
170
+ if os.path.exists(datafile):
171
+ os.remove(tmp_path)
172
+ except:
173
+ pass
174
+ P(f" FAILED pickle save! Error: {err_msg}")
120
175
  return datafile
121
176
 
122
177
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: ratio1
3
- Version: 3.4.101
3
+ Version: 3.4.103
4
4
  Summary: `ratio1` or Ratio1 SDK is the Python SDK required for client app development for the Ratio1 ecosystem
5
5
  Project-URL: Homepage, https://github.com/Ratio1/ratio1_sdk
6
6
  Project-URL: Bug Tracker, https://github.com/Ratio1/ratio1_sdk/issues
@@ -1,5 +1,5 @@
1
1
  ratio1/__init__.py,sha256=YimqgDbjLuywsf8zCWE0EaUXH4MBUrqLxt0TDV558hQ,632
2
- ratio1/_ver.py,sha256=TfhCgcGxyfsiDbJ1mx2faXqlllhgAWYMj9sM802Uwq8,332
2
+ ratio1/_ver.py,sha256=Lgdyf7mPLgbrtyWTcDGuVGjytM8gXsNgL3LRey756Pw,332
3
3
  ratio1/base_decentra_object.py,sha256=iXvAAf6wPnGWzeeiRfwLojVoan-m1e_VsyPzjUQuENo,4492
4
4
  ratio1/plugins_manager_mixin.py,sha256=X1JdGLDz0gN1rPnTN_5mJXR8JmqoBFQISJXmPR9yvCo,11106
5
5
  ratio1/base/__init__.py,sha256=hACh83_cIv7-PwYMM3bQm2IBmNqiHw-3PAfDfAEKz9A,259
@@ -48,7 +48,7 @@ ratio1/const/apps.py,sha256=MD4SRTNId663D3SX78_GJa40BI_H1mdcNbL2gy_Hio0,827
48
48
  ratio1/const/base.py,sha256=QIeRH6X-u8DbezQCGipI3isL1LGComBQC5hLedO1jrQ,6042
49
49
  ratio1/const/comms.py,sha256=qEYX4ciYg8SYWSDZZTUYxzpR1--2a7UusrWzAq0hxo8,2259
50
50
  ratio1/const/environment.py,sha256=632L5GrcNqF3-JhvrC6kXzXwLMcihRgMlOkLurnOwGY,1031
51
- ratio1/const/evm_net.py,sha256=1AWhqxaH0m_sC5W8CWBHiCxB88KXNyccR1b_8uMkDzw,19141
51
+ ratio1/const/evm_net.py,sha256=lyklHZS9DsbWfhansFTXpCULjtZMA9m_KVP_4z9Lp3M,20020
52
52
  ratio1/const/formatter.py,sha256=AW3bWlqf39uaqV4BBUuW95qKYfF2OkkU4f9hy3kSVhM,200
53
53
  ratio1/const/heartbeat.py,sha256=eVWuGIP5sAIYzQjOJUehmKlYHetoquSw-sTJwSKrHk8,3223
54
54
  ratio1/const/misc.py,sha256=VDCwwpf5bl9ltx9rzT2WPVP8B3mZFRufU1tSS5MO240,413
@@ -74,7 +74,7 @@ ratio1/io_formatter/default/aixp1.py,sha256=MX0TeUR4APA-qN3vUC6uzcz8Pssz5lgrQWo7
74
74
  ratio1/io_formatter/default/default.py,sha256=gEy78cP2D5s0y8vQh4aHuxqz7D10gGfuiKF311QhrpE,494
75
75
  ratio1/ipfs/__init__.py,sha256=vXEDLUNUO6lOTMGa8iQ9Zf7ajIQq9GZuvYraAHt3meE,38
76
76
  ratio1/ipfs/ifps_keygen,sha256=PcoYuo4c89_C9FWrKq9K_28ruhKqnxNn1s3nLHiF1tc,879
77
- ratio1/ipfs/r1fs.py,sha256=5Uo_adugcGTg8kVsev9kmyUSSzh9FdM803jPdu8QEWw,72273
77
+ ratio1/ipfs/r1fs.py,sha256=SBl1_UEhVd0rb4mgk9_zsdjBtp4XQKZ5_Fj4JMWzwls,91441
78
78
  ratio1/ipfs/ipfs_setup/ipfs.service,sha256=isTJQsktPy4i1yaDA9AC1OKdlTYvsCCRRAVX-EmGqAs,248
79
79
  ratio1/ipfs/ipfs_setup/launch_service.sh,sha256=GWhZyNqtohLxJg8Q_c8YnNZduu1ddXDU-IFRRMaEyiY,141
80
80
  ratio1/ipfs/ipfs_setup/restart.sh,sha256=9xHMgkUoAMI25jeaoDVFbCa_LjojYm3ubljW58RatKE,22
@@ -93,7 +93,7 @@ ratio1/logging/logger_mixins/download_mixin.py,sha256=ZZ1QuQ7kDcUkxhu65odD2pvBRl
93
93
  ratio1/logging/logger_mixins/general_serialization_mixin.py,sha256=bNM-6AsYhKD56v79hvJDgO8un5rHH4IKv1XJ3yksseQ,7424
94
94
  ratio1/logging/logger_mixins/json_serialization_mixin.py,sha256=JnfVOdQQTlx10sTLjQv8d7DO-1lNKuK4E8vMSE_t--Q,15198
95
95
  ratio1/logging/logger_mixins/machine_mixin.py,sha256=VoGijSjj0CwfbSo_jjp3uvs_aovWwQHNDEqu5jzmWJE,3640
96
- ratio1/logging/logger_mixins/pickle_serialization_mixin.py,sha256=T2NubHBoXPkIjSEM-9NgIiYfVbOk3qHI2VQ2BkHp_MY,9553
96
+ ratio1/logging/logger_mixins/pickle_serialization_mixin.py,sha256=tmdmoRz38iJGfK4J37oEghbRizN4BdZs7F2kObZ2ldQ,11391
97
97
  ratio1/logging/logger_mixins/process_mixin.py,sha256=eI0izBAhStPOant2SZv2ZuTDH10s2ON_CkuGQEEFew4,1888
98
98
  ratio1/logging/logger_mixins/resource_size_mixin.py,sha256=nH0R9RmhJxatX_qSQu_CtkLD6sH2stMOylpp_Ym6mHI,2281
99
99
  ratio1/logging/logger_mixins/timers_mixin.py,sha256=jXQvGxpemTrKZRI5ydrTUYrqAnXzZnvwzpqN4agJlGo,17299
@@ -109,8 +109,8 @@ ratio1/utils/comm_utils.py,sha256=4cS9llRr_pK_3rNgDcRMCQwYPO0kcNU7AdWy_LtMyCY,10
109
109
  ratio1/utils/config.py,sha256=Elfkl7W4aDMvB5WZLiYlPXrecBncgTxb4hcKhQedMzI,10111
110
110
  ratio1/utils/dotenv.py,sha256=_AgSo35n7EnQv5yDyu7C7i0kHragLJoCGydHjvOkrYY,2008
111
111
  ratio1/utils/oracle_sync/oracle_tester.py,sha256=aJOPcZhtbw1XPqsFG4qYpfv2Taj5-qRXbwJzrPyeXDE,27465
112
- ratio1-3.4.101.dist-info/METADATA,sha256=BVU5wfCAfmdkLik5OxkmamwQElMbvCs22_22XF0_u00,12256
113
- ratio1-3.4.101.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
114
- ratio1-3.4.101.dist-info/entry_points.txt,sha256=DR_olREzU1egwmgek3s4GfQslBi-KR7lXsd4ap0TFxE,46
115
- ratio1-3.4.101.dist-info/licenses/LICENSE,sha256=cvOsJVslde4oIaTCadabXnPqZmzcBO2f2zwXZRmJEbE,11311
116
- ratio1-3.4.101.dist-info/RECORD,,
112
+ ratio1-3.4.103.dist-info/METADATA,sha256=PyNsUMXEKsHT_UeyQuWfHm7i6lhPLMm1fxel4GHCJ4Y,12256
113
+ ratio1-3.4.103.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
114
+ ratio1-3.4.103.dist-info/entry_points.txt,sha256=DR_olREzU1egwmgek3s4GfQslBi-KR7lXsd4ap0TFxE,46
115
+ ratio1-3.4.103.dist-info/licenses/LICENSE,sha256=cvOsJVslde4oIaTCadabXnPqZmzcBO2f2zwXZRmJEbE,11311
116
+ ratio1-3.4.103.dist-info/RECORD,,