certora-cli-beta-mirror 7.29.2__py3-none-any.whl → 7.30.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- certora_cli/CertoraProver/certoraBuild.py +176 -62
- certora_cli/CertoraProver/certoraContextAttributes.py +33 -0
- certora_cli/CertoraProver/erc7201.py +45 -0
- certora_cli/CertoraProver/storageExtension.py +386 -0
- certora_cli/EquivalenceCheck/Eq_default.conf +0 -1
- certora_cli/EquivalenceCheck/Eq_sanity.conf +0 -1
- certora_cli/Mutate/mutateApp.py +10 -3
- certora_cli/Shared/certoraUtils.py +1 -0
- certora_cli/Shared/proverCommon.py +300 -0
- certora_cli/Shared/rustProverCommon.py +62 -0
- certora_cli/certoraEVMProver.py +2 -1
- certora_cli/certoraRanger.py +4 -36
- certora_cli/certoraRun.py +62 -166
- certora_cli/certoraSolanaProver.py +39 -134
- certora_cli/certoraSorobanProver.py +31 -224
- {certora_cli_beta_mirror-7.29.2.dist-info → certora_cli_beta_mirror-7.30.1.dist-info}/METADATA +2 -2
- {certora_cli_beta_mirror-7.29.2.dist-info → certora_cli_beta_mirror-7.30.1.dist-info}/RECORD +23 -19
- certora_jars/CERTORA-CLI-VERSION-METADATA.json +1 -1
- certora_jars/Typechecker.jar +0 -0
- {certora_cli_beta_mirror-7.29.2.dist-info → certora_cli_beta_mirror-7.30.1.dist-info}/LICENSE +0 -0
- {certora_cli_beta_mirror-7.29.2.dist-info → certora_cli_beta_mirror-7.30.1.dist-info}/WHEEL +0 -0
- {certora_cli_beta_mirror-7.29.2.dist-info → certora_cli_beta_mirror-7.30.1.dist-info}/entry_points.txt +0 -0
- {certora_cli_beta_mirror-7.29.2.dist-info → certora_cli_beta_mirror-7.30.1.dist-info}/top_level.txt +0 -0
certora_cli/CertoraProver/certoraBuild.py

@@ -25,8 +25,11 @@ from collections import OrderedDict, defaultdict
 from enum import Enum
 from functools import lru_cache
 from pathlib import Path
-from typing import Any, Dict, List, Tuple, Optional, Set, Iterator, NoReturn
 from Crypto.Hash import keccak
+import tempfile
+
+from typing import Any, Dict, List, Tuple, Optional, Set, Iterator, NoReturn
+
 
 from CertoraProver.certoraBuildCacheManager import CertoraBuildCacheManager, CachedFiles
 from CertoraProver.certoraBuildDataClasses import CONTRACTS, ImmutableReference, ContractExtension, ContractInSDC, SDC, \
@@ -35,6 +38,7 @@ from CertoraProver.certoraCompilerParameters import SolcParameters
 from CertoraProver.certoraSourceFinders import add_source_finders
 from CertoraProver.certoraVerifyGenerator import CertoraVerifyGenerator
 from CertoraProver.certoraContractFuncs import Func, InternalFunc, STATEMUT, SourceBytes
+
 from Shared.certoraUtils import is_relative_to
 
 scripts_dir_path = Path(__file__).parent.parent.resolve()  # containing directory
@@ -54,7 +58,11 @@ from Shared import certoraValidateFuncs as Vf
 from CertoraProver import certoraContextValidator as Cv
 from Shared import certoraUtils as Util
 import CertoraProver.certoraContext as Ctx
-
+from CertoraProver import storageExtension
+from CertoraProver.storageExtension import (
+    NameSpacedStorage,
+    NewStorageInfo,
+)
 
 BUILD_IS_LIBRARY = False
 AUTO_FINDER_PREFIX = "autoFinder_"
@@ -72,6 +80,7 @@ MUTANTS_LOCATION = "mutants_location"
 
 FunctionSig = Tuple[str, List[str], List[str], str]
 
+
 # logger for building the abstract syntax tree
 ast_logger = logging.getLogger("ast")
 # logger for issues calling/shelling out to external functions
@@ -84,12 +93,10 @@ build_logger = logging.getLogger("build_conf")
 # logger of the build cache
 build_cache_logger = logging.getLogger("build_cache")
 
-
 def fatal_error(logger: logging.Logger, msg: str) -> NoReturn:
     logger.fatal(msg)
     raise Exception(msg)
 
-
 class InputConfig:
     def __init__(self, context: CertoraContext) -> None:
         """
@@ -1396,7 +1403,7 @@ class CertoraBuildGenerator:
         if not self.context.strict_solc_optimizer and self.context.solc_via_ir:
             # The default optimizer steps (taken from libsolidity/interface/OptimiserSettings.h) but with the
             # full inliner step removed
-
+            solc0_8_26_to_0_8_30 = ("dhfoDgvulfnTUtnIfxa[r]EscLMVcul[j]Trpeulxa[r]cLCTUca[r]LSsTFOtfDnca[r]" +
                                     "IulcscCTUtx[scCTUt]TOntnfDIuljmul[jul]VcTOculjmul")
             solc0_8_13_to_0_8_25 = "dhfoDgvulfnTUtnIf[xa[r]EscLMcCTUtTOntnfDIulLculVcul[j]T" + \
                                    "peulxa[rul]xa[r]cLgvifCTUca[r]LSsTFOtfDnca[r]Iulc]jmul[jul]VcTOculjmul"
@@ -1440,8 +1447,8 @@ class CertoraBuildGenerator:
             yul_optimizer_steps = solc0_8_12
         elif minor == 8 and 13 <= patch <= 25:
             yul_optimizer_steps = solc0_8_13_to_0_8_25
-        elif minor == 8 and 26 <= patch <=
-            yul_optimizer_steps =
+        elif minor == 8 and 26 <= patch <= 30:
+            yul_optimizer_steps = solc0_8_26_to_0_8_30
         assert yul_optimizer_steps is not None, \
             'Yul Optimizer steps missing for requested Solidity version. Please contact Certora team.'
 
@@ -2539,7 +2546,6 @@ class CertoraBuildGenerator:
         Util.print_progress_message(f"Compiling {orig_file_name}...")
         sdc_pre_finders = self.collect_for_file(build_arg_contract_file, i, compiler_lang, Path(os.getcwd()),
                                                 path_for_compiler_collector_file, original_sdc=None)
-
         # Build sources tree
         build_logger.debug("Building source tree")
         sources_from_pre_finder_SDCs = set()
@@ -2672,8 +2678,154 @@ class CertoraBuildGenerator:
         self.handle_links()
         self.handle_struct_links()
         self.handle_contract_extensions()
+        if self.context.storage_extension_annotation:
+            self.handle_erc7201_annotations()
         self.handle_storage_extension_harnesses()
 
+    def extract_slayout(self, original_file: str, ns_storage: Set[NameSpacedStorage]) -> NewStorageInfo:
+        """
+        Given a file containing a contract with namespaced storage, extract the storage information
+        corresponding to the namespaced types.
+
+        Args:
+            original_file: Path to the Solidity file containing namespaced storage declarations
+            ns_storage: Set of tuples (type_name, namespace) for each namespaced storage
+
+        Returns:
+            NewStorageInfo: A tuple (fields, types) where:
+                - fields: List of new fields added by the namespaced storage
+                - types: Dictionary of types referenced by the fields
+        """
+        file_dir = Path(original_file).parent
+
+        # Generate a unique name for the harness contract based on the contract names in the file
+        harness_name = storageExtension.generate_harness_name(original_file)
+
+        with tempfile.NamedTemporaryFile(mode="w+t", suffix=".sol", dir=file_dir, delete=True) as tmp_file:
+            # Import the original file.
+            # Note we import and don't inline the file's contents since that's how the
+            # original file is accessed also in the actual code, and compiling it
+            # directly can cause issues.
+            tmp_file.write(f"import \"{original_file}\";\n\n")
+
+            # Write the harness contract with dummy fields for each namespaced storage
+            var_to_slot = storageExtension.write_harness_contract(tmp_file, harness_name, ns_storage)
+            tmp_file.flush()
+
+            if self.context.extract_storage_extension_annotation:
+                # If the flag is set, save the storage extension contract
+                build_dir = Util.get_build_dir()
+                shutil.copyfile(
+                    Path(tmp_file.name),
+                    build_dir / f"{Path(original_file).stem}_storage_extension.sol"
+                )
+
+            # Add harness to compiler map
+            storageExtension.add_harness_to_compiler_map(
+                original_file,
+                tmp_file,
+                self.context
+            )
+
+            # normalize the path exactly the way collect_for_file expects it:
+            abs_path = Util.abs_posix_path(tmp_file.name)
+            self.context.file_to_contract[abs_path] = {harness_name}
+
+            try:
+                # Compile & fetch the raw storage_layout
+                compile_idx = storageExtension.get_next_file_index(self.file_to_sdc_name)
+                sdcs = self.collect_for_file(tmp_file.name, compile_idx, CompilerLangSol(), Path.cwd(), Util.abs_posix_path(tmp_file.name), None)
+                if not sdcs:
+                    raise RuntimeError(f"Failed to compile harness contract for {tmp_file}")
+                layout = storageExtension.extract_harness_contract_layout(sdcs, harness_name)
+
+                # Remap each slot according to the ERC-7201 namespace
+                remapped_fields = storageExtension.remapped_fields_from_layout(layout, var_to_slot)
+
+                return (remapped_fields, layout.get('types', {}))
+
+            except Exception as e:
+                build_logger.error(f"Error extracting storage layout for {original_file}: {str(e)}")
+                raise
+            finally:
+                # Delete the key from the context
+                self.context.file_to_contract.pop(abs_path, None)
+
+    def handle_erc7201_annotations(self) -> None:
+        """
+        Look for contracts that use erc-7201 namespaced storage layout
+        (see https://eips.ethereum.org/EIPS/eip-7201).
+
+        Find contracts A s.t. A contain a type declaration with such an annotation, e.g.
+            /** @custom:storage-location erc-7201:some.name.space */
+            struct T { ... }
+
+        Then, for any contract C that has A as a base contract, _extend_ C's storage layout
+        information such that it contains the information for a `T` at the slot
+        erc-7201(some.name.space) as defined in the EIP.
+        """
+        # Find all erc7201-like contracts, generate+compile a harness & extract layout information
+        # maps (path,contract) -> new storage info added by (path,contract)
+        slayouts: Dict[Tuple[str, str], NewStorageInfo] = {}
+
+        # Scan all of the contracts (including dependencies of targets) for namespaced storage
+        # layout information
+        for target_file in self.context.file_paths:
+            if target_file not in self.asts:
+                # No AST for this file, so we can't do anything
+                continue
+            for (imported_file, imported_file_ast) in self.asts[target_file].items():
+                for def_node in imported_file_ast.values():
+                    if def_node.get("nodeType") != "ContractDefinition":
+                        continue
+
+                    # Construct a key for the contract definition node
+                    contract_name = def_node.get("name")
+                    key = (imported_file, contract_name)
+                    if key in slayouts:
+                        # We already have this contract's storage layout information
+                        continue
+
+                    # Collect any @custom:storage-location annotations
+                    ns_storage = storageExtension.get_namespace_storage_from_ast(def_node)
+
+                    if not ns_storage:
+                        # No namespaced storage found in this contract
+                        continue
+
+                    # Now that we have all the storage layout information, extract it once
+                    slayouts[key] = self.extract_slayout(imported_file, ns_storage)
+
+        if self.context.test == str(Util.TestValue.STORAGE_EXTENSION_LAYOUT):
+            raise Util.TestResultsReady(slayouts)
+
+        if not slayouts:
+            # No contracts with namespaced storage found
+            return
+
+        # Finally, extend each target contract with the storage layout info from
+        # all of its base contracts
+        for target in self.get_primary_contracts_from_sdcs():
+            if target.name not in self.context.contract_to_file:
+                # This is a contract that was not compiled, so we don't have a file for it
+                continue
+            target_file = self.context.contract_to_file[target.name]
+            base_contracts = self.retrieve_base_contracts_list(
+                target_file,
+                Util.abs_posix_path(target_file),
+                target.name
+            )
+            extensions: Set[str] = set()
+            harnesses: Dict[str, NewStorageInfo] = {}
+            for base in base_contracts:
+                layout = slayouts.get((base[0], base[1]))
+                if layout is not None:
+                    extensions.add(base[1])
+                    harnesses[base[1]] = layout
+                else:
+                    build_logger.warning(f"Could not find storage layout for {base[1]} in {base[0]}")
+            storageExtension.apply_extensions(target, extensions, harnesses)
+
     def handle_storage_extension_harnesses(self) -> None:
         def new_field_of_node(ext_instance: Any, node: Dict[str, Any]) -> Optional[Dict[str, Any]]:
             """
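The slot remapping in `extract_slayout` rests on the ERC-7201 base-slot formula implemented by the new `certora_cli/CertoraProver/erc7201.py` module (shown at the end of this diff). Below is a minimal sketch of the idea, assuming the helper is importable as `CertoraProver.erc7201`; the namespace, field label, and type id are illustrative stand-ins, not the actual output of `storageExtension.remapped_fields_from_layout`:

```python
from CertoraProver.erc7201 import erc7201

# Namespace as it would appear in an annotated base contract, e.g.
#   /** @custom:storage-location erc-7201:some.name.space */
#   struct T { ... }
namespace = b"some.name.space"

# Base slot at which the namespaced struct is rooted, per EIP-7201.
base_slot = erc7201(namespace)

# Illustrative shape of an entry appended to the extended contract's
# storage_layout["storage"]; solc storage layouts keep slots as decimal strings.
new_field = {
    "label": "someNameSpaceStorage",   # hypothetical harness field name
    "slot": str(base_slot),
    "type": "t_struct(T)_storage",     # hypothetical type id
}
print(hex(base_slot), new_field["slot"])
```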
@@ -2760,52 +2912,6 @@ class CertoraBuildGenerator:
                 new_fields.append(new_field)
 
             return (ext_instance, extension_sdc_name, new_fields)
-
-        def apply_extensions(target_contract: Any, extensions: Set[str], to_add: Dict[str, Tuple[List[Dict[str, Any]], Dict[str, Any]]]) -> None:
-            """
-            Apply the fields from each extension to the target contract,
-            """
-            if target_contract.storage_layout.get("storage") is None:
-                target_contract.storage_layout["storage"] = []
-            if target_contract.storage_layout.get("types") is None:
-                target_contract.storage_layout["types"] = {}
-            target_slots = {storage["slot"] for storage in target_contract.storage_layout["storage"]}
-            target_vars = {storage["label"] for storage in target_contract.storage_layout["storage"]}
-            # Keep track of slots we've added, and error if we
-            # find two extensions extending the same slot
-            added_slots: Dict[str, str] = {}
-            added_vars: Dict[str, str] = {}
-            for ext in extensions:
-                (new_fields, new_types) = to_add[ext]
-
-                for f in new_fields:
-                    # See if any of the new fields is a slot or variable name we've already added
-                    slot = f["slot"]
-                    var = f["label"]
-                    if slot in added_slots:
-                        seen = added_slots[slot]
-                        raise Util.CertoraUserInputError(f"Slot {slot} added to {target_contract.name} by {ext} was already added by {seen}")
-
-                    if var in added_vars:
-                        seen = added_vars[var]
-                        raise Util.CertoraUserInputError(f"Var '{var}' added to {target_contract.name} by {ext} was already added by {seen}")
-
-                    if slot in target_slots:
-                        raise Util.CertoraUserInputError(f"Slot {slot} added to {target_contract.name} by {ext} is already mapped by {target_contract.name}")
-
-                    if var in target_vars:
-                        raise Util.CertoraUserInputError(f"Var '{var}' added to {target_contract.name} by {ext} is already declared by {target_contract.name}")
-
-                    added_slots[slot] = ext
-                    added_vars[var] = ext
-
-                target_contract.storage_layout["storage"].extend(new_fields)
-
-                for (new_id, new_ty) in new_types.items():
-                    if new_id in target_contract.storage_layout["types"]:
-                        continue
-                    target_contract.storage_layout["types"][new_id] = new_ty
-
         extension_contracts: Set[str] = set()
         storage_extensions: Dict[str, Set[str]] = defaultdict(set)
         storage_ext = self.context.storage_extension_harnesses
@@ -2834,7 +2940,7 @@ class CertoraBuildGenerator:
                 sdc = self.SDCs[target_sdc]
                 target_contract = sdc.find_contract(target)
                 assert target_contract is not None, f"could not find contract for {target}"
-                apply_extensions(target_contract, extensions, extension_to_fields_and_types)
+                storageExtension.apply_extensions(target_contract, extensions, extension_to_fields_and_types)
 
     def finders_compilation_round(self,
                                   build_arg_contract_file: str,
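The nested `apply_extensions` helper removed here now lives in `CertoraProver.storageExtension`, and the call site above passes the same three arguments. The sketch below shows the data shapes involved, using a stand-in object for the contract; the concrete field values are made up, and it assumes the relocated function keeps the behavior of the removed code:

```python
from types import SimpleNamespace

from CertoraProver import storageExtension

# Stand-in for the contract object: the removed helper only touched
# .name and .storage_layout, so that is all we model here.
target = SimpleNamespace(
    name="Vault",  # hypothetical contract name
    storage_layout={
        "storage": [{"label": "owner", "slot": "0", "type": "t_address"}],
        "types": {"t_address": {"encoding": "inplace", "numberOfBytes": "20"}},
    },
)

# One extension contributing a single field (at a made-up slot) plus its type info.
to_add = {
    "VaultStorageExt": (
        [{"label": "extCounter", "slot": "12345", "type": "t_uint256"}],
        {"t_uint256": {"encoding": "inplace", "numberOfBytes": "32"}},
    )
}

# Merges the new field and type into target.storage_layout; per the removed code,
# a duplicate slot or label raises Util.CertoraUserInputError instead.
storageExtension.apply_extensions(target, {"VaultStorageExt"}, to_add)
assert any(f["label"] == "extCounter" for f in target.storage_layout["storage"])
```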
@@ -3436,8 +3542,9 @@ class CertoraBuildGenerator:
         raise Util.CertoraUserInputError(f"collect_sources: {path_to_file} does not exist cwd - {Path.cwd()}."
                                          f"abs - {os.path.normpath(Path.cwd() / path_to_file)}")
 
-    sources = set()
-
+    sources = set(sources_from_SDCs)
+    if context.files:
+        sources.update(Path(p) for p in context.files)  # all files in "files" attribute are uploaded
     sources |= certora_verify_generator.get_spec_files()
     if Util.PACKAGE_FILE.exists():
         add_to_sources(Util.PACKAGE_FILE)
@@ -3546,7 +3653,8 @@ def build_source_tree(sources: Set[Path], context: CertoraContext, overwrite: bo
         build_logger.debug("Couldn't copy repro conf to certora sources.", exc_info=e)
         raise
 
-def build_from_scratch(
+def build_from_scratch(context: CertoraContext,
+                       certora_build_generator: CertoraBuildGenerator,
                        certora_verify_generator: CertoraVerifyGenerator,
                        build_cache_enabled: bool) -> CachedFiles:
     """
@@ -3572,9 +3680,15 @@ def build_from_scratch(certora_build_generator: CertoraBuildGenerator,
     may_store_in_build_cache = True
     absolute_sources_dir = Util.get_certora_sources_dir().absolute()
    for sdc in certora_build_generator.SDCs.values():
+
+        # add to cache also source files that were found in the SDCs (e.g., storage extensions)
+        paths_set = sdc.all_contract_files
+        for p in context.files:
+            paths_set.add(Path(p).absolute())
+
         # the contract files in SDCs are relative to .certora_sources. Which isn't good for us here.
         # Need to be relative to original paths
-        for f in
+        for f in paths_set:
             if is_relative_to(f, absolute_sources_dir):
                 rel_f = f.relative_to(absolute_sources_dir)
             else:
@@ -3623,7 +3737,7 @@ def build_from_cache_or_scratch(context: CertoraContext,
     cached_files: Optional[CachedFiles] = None
 
     if not context.build_cache:
-        cached_files = build_from_scratch(certora_build_generator,
+        cached_files = build_from_scratch(context, certora_build_generator,
                                           certora_verify_generator,
                                           False)
         return cache_hit, False, cached_files
@@ -3634,7 +3748,7 @@ def build_from_cache_or_scratch(context: CertoraContext,
         build_cache_disabling_options = certora_build_cache_manager.cache_disabling_options(context)
         build_logger.warning("Requested to enable the build cache, but the build cache is not applicable "
                              f"to this run because of the given options: {build_cache_disabling_options}")
-        cached_files = build_from_scratch(certora_build_generator,
+        cached_files = build_from_scratch(context, certora_build_generator,
                                           certora_verify_generator,
                                           False)
        return cache_hit, False, cached_files
@@ -3660,7 +3774,7 @@ def build_from_cache_or_scratch(context: CertoraContext,
             cache_hit = True
         else:
             # rebuild
-            cached_files = build_from_scratch(certora_build_generator,
+            cached_files = build_from_scratch(context, certora_build_generator,
                                               certora_verify_generator,
                                               True)
 
certora_cli/CertoraProver/certoraContextAttributes.py

@@ -998,6 +998,24 @@ class EvmAttributes(AttrUtil.Attributes):
         disables_build_cache=False
     )
 
+    STORAGE_EXTENSION_ANNOTATION = AttrUtil.AttributeDefinition(
+        arg_type=AttrUtil.AttrArgType.BOOLEAN,
+        argparse_args={
+            'action': AttrUtil.STORE_TRUE
+        },
+        affects_build_cache_key=True,
+        disables_build_cache=False
+    )
+
+    EXTRACT_STORAGE_EXTENSION_ANNOTATION = AttrUtil.AttributeDefinition(
+        arg_type=AttrUtil.AttrArgType.BOOLEAN,
+        argparse_args={
+            'action': AttrUtil.STORE_TRUE
+        },
+        affects_build_cache_key=False,
+        disables_build_cache=False
+    )
+
     OPTIMISTIC_HASHING = AttrUtil.AttributeDefinition(
         arg_type=AttrUtil.AttrArgType.BOOLEAN,
         help_msg="Bound the length of data (with potentially unbounded length) to the value given in "
@@ -1128,6 +1146,21 @@ class EvmAttributes(AttrUtil.Attributes):
         disables_build_cache=False,
     )
 
+    MAX_CONCURRENT_RULES = AttrUtil.AttributeDefinition(
+        attr_validation_func=Vf.validate_non_negative_integer,
+        argparse_args={
+            'action': AttrUtil.UniqueStore
+        },
+        help_msg="Set the maximum number of parallel rule evaluations. "
+                 "Lower values (e.g., 1, 2, or 4) may reduce memory usage in large runs. "
+                 "This can sometimes help to mitigate out of memory problems.",
+        default_desc="Number of available CPU cores.",
+        temporary_jar_invocation_allowed=True,
+        jar_flag="-maxConcurrentRules",
+        affects_build_cache_key=False,
+        disables_build_cache=False,
+    )
+
     @classmethod
     def hide_attributes(cls) -> List[str]:
         # do not show these attributes in the help message
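For context, the build code above reads the new switches from the context as `storage_extension_annotation` and `extract_storage_extension_annotation`, and `MAX_CONCURRENT_RULES` is forwarded to the prover as `-maxConcurrentRules`. Assuming the usual certora-cli convention that attribute names map to lower-case `.conf` keys (an inference from the definitions above, not from documentation), a run exercising the new options might be described like this:

```python
import json

# Hypothetical .conf content; the "files" and "verify" values are placeholders.
conf = {
    "files": ["contracts/Vault.sol"],
    "verify": "Vault:specs/Vault.spec",
    "storage_extension_annotation": True,           # enable the ERC-7201 layout-extension pass
    "extract_storage_extension_annotation": True,   # also save the generated harness .sol in the build dir
    "max_concurrent_rules": 2,                      # cap parallel rule evaluation to reduce peak memory
}
print(json.dumps(conf, indent=2))
```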
certora_cli/CertoraProver/erc7201.py (new file)

@@ -0,0 +1,45 @@
+# The Certora Prover
+# Copyright (C) 2025 Certora Ltd.
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, version 3 of the License.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+from Crypto.Hash import keccak
+
+
+def calculate_keccak_hash(x: bytes) -> int:
+    """
+    Calculates the Keccak-256 hash of the input bytes and returns it as an integer.
+
+    Args:
+        x (bytes): The input bytes to be hashed.
+    Returns:
+        int: The Keccak-256 hash value as an integer.
+    """
+    k = keccak.new(digest_bits=256)
+    k.update(x)
+    return int(k.hexdigest(), base=16)
+
+def erc7201(x: bytes) -> int:
+    """
+    Hashes the input bytes using the Keccak-256 algorithm and returns
+    the result as an integer. The input is first hashed, then decremented
+    by 1, and the resulting hash is used to compute the final hash.
+    The final hash is masked to 256 bits and the last byte is cleared
+    (set to zero).
+
+    Args:
+        x (bytes): The input bytes to be hashed.
+    Returns:
+        int: The final hash value as an integer.
+    """
+    return calculate_keccak_hash((calculate_keccak_hash(x) - 1).to_bytes(32, byteorder='big')) & (~0xff)
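As a sanity check, the new helper can be compared against the EIP-7201 definition `keccak256(abi.encode(uint256(keccak256(id)) - 1)) & ~bytes32(uint256(0xff))` computed by hand. A minimal sketch, assuming `pycryptodome` is available and the module is importable as `CertoraProver.erc7201`; the namespace id is just an example:

```python
from Crypto.Hash import keccak

from CertoraProver.erc7201 import erc7201


def keccak_int(data: bytes) -> int:
    """Keccak-256 of `data` as an integer (same construction as calculate_keccak_hash)."""
    h = keccak.new(digest_bits=256)
    h.update(data)
    return int(h.hexdigest(), 16)


namespace = b"example.main"  # illustrative namespace id

# abi.encode of a uint256 is its 32-byte big-endian representation, so the EIP formula
# reduces to exactly what erc7201() computes.
expected = keccak_int((keccak_int(namespace) - 1).to_bytes(32, byteorder="big")) & ~0xff

assert erc7201(namespace) == expected
assert expected & 0xff == 0  # the low byte of an ERC-7201 base slot is always zero
print(hex(expected))
```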