certora-cli-beta-mirror 8.6.3__py3-none-any.whl → 8.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -66,13 +66,6 @@ class CompilerLang(metaclass=AbstractAndSingleton):
66
66
  """
67
67
  return func_hash
68
68
 
69
- @staticmethod
70
- def normalize_file_compiler_path_name(file_abs_path: str) -> str:
71
- """
72
- Normalizes the absolute path name [file_abs_path] of a file, given to the compiler.
73
- """
74
- return file_abs_path
75
-
76
69
  @staticmethod
77
70
  def normalize_deployed_bytecode(deployed_bytecode: str) -> str:
78
71
  """
@@ -114,7 +107,9 @@ class CompilerLang(metaclass=AbstractAndSingleton):
114
107
  config_path: Path,
115
108
  compiler_cmd: str,
116
109
  compiler_version: Optional[CompilerVersion],
117
- data: Dict[str, Any]) -> Dict[str, Any]:
110
+ data: Dict[str, Any],
111
+ asts : Dict[str, Dict[int, Any]],
112
+ ast_key: str) -> Dict[str, Any]:
118
113
  """
119
114
  Returns the data dictionary of the contract with storage layout information if needed
120
115
  """
@@ -195,5 +190,11 @@ class CompilerCollector(ABC):
195
190
  def compiler_version(self) -> CompilerVersion:
196
191
  pass
197
192
 
193
+ def normalize_file_compiler_path_name(self, file_abs_path: str) -> str:
194
+ """
195
+ Normalizes the absolute path name [file_abs_path] of a file, given to the compiler.
196
+ """
197
+ return file_abs_path
198
+
198
199
  def __str__(self) -> str:
199
200
  return f"{self.compiler_name} {self.compiler_version}"
@@ -52,12 +52,6 @@ class CompilerLangVy(CompilerLang, metaclass=Singleton):
52
52
  except ValueError:
53
53
  raise Exception(f'{func_hash} is not convertible to hexadecimal')
54
54
 
55
- @staticmethod
56
- def normalize_file_compiler_path_name(file_abs_path: str) -> str:
57
- if not file_abs_path.startswith('/'):
58
- return '/' + file_abs_path
59
- return file_abs_path
60
-
61
55
  @staticmethod
62
56
  def normalize_deployed_bytecode(deployed_bytecode: str) -> str:
63
57
  assert deployed_bytecode.startswith("0x"), f'expected {deployed_bytecode} to have hexadecimal prefix'
@@ -785,10 +779,10 @@ class CompilerLangVy(CompilerLang, metaclass=Singleton):
785
779
  return [t.resolve_forward_declared_types(name_resolution_dict) for t in real_types]
786
780
 
787
781
  @staticmethod
788
- def extract_ast_types_and_public_vardecls(ast_body_nodes: Dict[int, Dict[str, Any]]) -> \
782
+ def extract_ast_types_and_public_vardecls(ast_body_nodes_per_file: Dict[str, Dict[int, Dict[str, Any]]]) -> \
789
783
  Tuple[List[VyperType], Dict[str, VyperType]]:
790
784
  """
791
- :param ast_body_nodes:
785
+ :param ast_body_nodes_per_file:
792
786
  :return: (types, vars) where `types` is a list of all user-defined types, and `vars` maps public variables to
793
787
  their output types. Note that `types` has been fully resolved - all `VyperTypeNameReference` nodes have been
794
788
  dereferenced
@@ -805,38 +799,50 @@ class CompilerLangVy(CompilerLang, metaclass=Singleton):
805
799
 
806
800
  # Process named constants ahead of time, as their use site in the source may precede
807
801
  # their definition site, e.g.
808
- for ast_node in ast_body_nodes.values():
809
- if ast_node['ast_type'] != 'VariableDecl':
810
- continue
811
- if ast_node['is_constant'] and ast_node['value'] is not None and \
812
- (ast_node['value']['ast_type'] == 'Int'):
813
- named_constants.update({ast_node['target']['id']: int(ast_node['value']['value'])})
814
-
815
- for ast_node in ast_body_nodes.values():
816
- if ast_node['ast_type'] == 'VariableDecl':
817
- decltype = CompilerLangVy.extract_type_from_variable_decl(ast_node, named_constants)
818
- result_types.append(decltype)
819
- if ast_node['is_public']:
820
- public_vardecls[ast_node['target']['id']] = decltype
821
- elif ast_node['ast_type'] == 'StructDef':
822
- result_types.append(CompilerLangVy.extract_type_from_struct_def(ast_node, named_constants))
823
- # Not sure if `Import` is an actual ast type. It was already there, so I am not removing it.
824
- # I only fixed the implementation of this case to what I think it should be.
825
- elif ast_node['ast_type'] == 'Import':
826
- result_types.append(CompilerLangVy.VyperTypeContract(ast_node['name']))
827
- elif ast_node['ast_type'] == 'ImportFrom':
828
- result_types.append(CompilerLangVy.VyperTypeContract(ast_node['name']))
829
- elif ast_node['ast_type'] == 'InterfaceDef':
830
- result_types.append(CompilerLangVy.VyperTypeContract(ast_node['name']))
831
- resolved_result_types = CompilerLangVy.resolve_extracted_types(result_types)
832
- return resolved_result_types, resolve_vardecl_types(public_vardecls, resolved_result_types)
802
+ for _, ast_body_nodes in ast_body_nodes_per_file.items():
803
+ for ast_node in ast_body_nodes.values():
804
+ if ast_node['ast_type'] != 'VariableDecl':
805
+ continue
806
+ if ast_node['is_constant'] and ast_node['value'] is not None and \
807
+ (ast_node['value']['ast_type'] == 'Int'):
808
+ named_constants.update({ast_node['target']['id']: int(ast_node['value']['value'])})
809
+
810
+ resolved_result_types = []
811
+ for _, ast_body_nodes in ast_body_nodes_per_file.items():
812
+ for ast_node in ast_body_nodes.values():
813
+ if ast_node['ast_type'] == 'VariableDecl':
814
+ decltype = CompilerLangVy.extract_type_from_variable_decl(ast_node, named_constants)
815
+ result_types.append(decltype)
816
+ if ast_node['is_public']:
817
+ public_vardecls[ast_node['target']['id']] = decltype
818
+ elif ast_node['ast_type'] == 'StructDef':
819
+ result_types.append(CompilerLangVy.extract_type_from_struct_def(ast_node, named_constants))
820
+ # Not sure if `Import` is an actual ast type. It was already there, so I am not removing it.
821
+ # I only fixed the implementation of this case to what I think it should be.
822
+ elif ast_node['ast_type'] in ['Import', 'ImportFrom']:
823
+ if "name" in ast_node:
824
+ result_types.append(CompilerLangVy.VyperTypeContract(ast_node['name']))
825
+ elif "names" in ast_node:
826
+ n_list = ast_node["names"]
827
+ for t in n_list:
828
+ result_types.append(CompilerLangVy.VyperTypeContract(t["name"]))
829
+ else:
830
+ raise Exception("Unrecognized import node")
831
+ elif ast_node['ast_type'] == 'InterfaceDef':
832
+ result_types.append(CompilerLangVy.VyperTypeContract(ast_node['name']))
833
+ resolved_result_types.extend(CompilerLangVy.resolve_extracted_types(result_types))
834
+
835
+ # SG: I'm not sure why we didn't set it as a set to begin with. Punting for now
836
+ return list(set(resolved_result_types)), resolve_vardecl_types(public_vardecls, resolved_result_types)
833
837
 
834
838
  @staticmethod
835
839
  def collect_storage_layout_info(file_abs_path: str,
836
840
  config_path: Path,
837
841
  compiler_cmd: str,
838
842
  compiler_version: Optional[CompilerVersion],
839
- data: Dict[str, Any]) -> Dict[str, Any]:
843
+ data: Dict[str, Any],
844
+ asts : Dict[str, Dict[int, Any]],
845
+ ast_key: str) -> Dict[str, Any]:
840
846
  # only Vyper versions 0.2.16 and up have the storage layout
841
847
  if compiler_version is None or not CompilerCollectorVy.supports_storage_layout(compiler_version):
842
848
  return data
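A minimal sketch (not the shipped helper) of the per-file AST shape that extract_ast_types_and_public_vardecls now receives, and of the two import-node shapes ("name" vs "names") it accepts; the file paths and node contents below are hypothetical:

    from typing import Any, Dict, List

    def collect_contract_names(ast_body_nodes_per_file: Dict[str, Dict[int, Dict[str, Any]]]) -> List[str]:
        names: List[str] = []
        for _, ast_body_nodes in ast_body_nodes_per_file.items():
            for node in ast_body_nodes.values():
                if node['ast_type'] in ('Import', 'ImportFrom', 'InterfaceDef'):
                    if 'name' in node:            # older AST shape: a single name
                        names.append(node['name'])
                    elif 'names' in node:         # newer shape: a list of alias dicts
                        names.extend(t['name'] for t in node['names'])
                    else:
                        raise Exception('Unrecognized import node')
        return names

    asts = {
        'contracts/Main.vy':  {1: {'ast_type': 'ImportFrom', 'names': [{'name': 'IERC20'}]}},
        'contracts/Other.vy': {7: {'ast_type': 'InterfaceDef', 'name': 'IVault'}},
    }
    assert collect_contract_names(asts) == ['IERC20', 'IVault']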
@@ -872,20 +878,6 @@ class CompilerLangVy(CompilerLang, metaclass=Singleton):
872
878
  print(f'Error: {e}')
873
879
  print_failed_to_run(compiler_cmd)
874
880
  raise
875
- ast_output_file_name = f'{get_certora_config_dir()}.ast'
876
- ast_stdout_name = storage_layout_output_file_name + '.stdout'
877
- ast_stderr_name = storage_layout_output_file_name + '.stderr'
878
- args = [compiler_cmd, '-f', 'ast', '-o', ast_output_file_name, file_abs_path]
879
- with Path(ast_stdout_name).open('w+') as stdout:
880
- with Path(ast_stderr_name).open('w+') as stderr:
881
- try:
882
- subprocess.run(args, stdout=stdout, stderr=stderr)
883
- with Path(ast_output_file_name).open('r') as output_file:
884
- ast_dict = json.load(output_file)
885
- except Exception as e:
886
- print(f'Error: {e}')
887
- print_failed_to_run(compiler_cmd)
888
- raise
889
881
 
890
882
  # Depressing how many bugs old Vyper had. Example:
891
883
  # vyper 0.3.7: "userBalances": {"type": "HashMap[address, uint256]", "slot": 1}
@@ -893,10 +885,7 @@ class CompilerLangVy(CompilerLang, metaclass=Singleton):
893
885
  # "location": "storage", "slot": 2}
894
886
  # so we'll just gracefully exit
895
887
  try:
896
-
897
- extracted_types, _ = CompilerLangVy.extract_ast_types_and_public_vardecls(
898
- {x['node_id']: x for x in ast_dict['ast']['body']}
899
- )
888
+ extracted_types, _ = CompilerLangVy.extract_ast_types_and_public_vardecls(asts)
900
889
  all_used_types = list(itertools.chain.from_iterable([e.get_used_types() for e in extracted_types])) + \
901
890
  list(CompilerLangVy.primitive_types.values())
902
891
  type_descriptors_by_name = {i.get_canonical_vyper_name(): i.get_storage_type_descriptor()
@@ -931,22 +920,63 @@ class CompilerLangVy(CompilerLang, metaclass=Singleton):
931
920
 
932
921
  return desc
933
922
 
934
- storage_field = [{
935
- 'label': v,
936
- 'slot': str(storage_layout_dict[v]['slot']),
937
- 'offset': 0,
938
- 'type': storage_layout_dict[v]['type'],
939
- 'descriptor': annotate_desc(type_descriptors_by_name[storage_layout_dict[v]['type']],
940
- storage_layout_dict[v]['type'], types_field)
941
- } for v in storage_layout_dict.keys()]
942
-
943
- contract_name = list(data['contracts'][file_abs_path].keys())[0]
944
- data['contracts'][file_abs_path][contract_name]['storageLayout'] = {
923
+ def extract_storage_fields(storage_layout_dict: Dict[str, Any],
924
+ type_descriptors_by_name: Dict[str, Dict[str, Any]],
925
+ types_field: Dict[str, Dict[str, Any]],
926
+ parent_path: str = "") -> List[Dict[str, Any]]:
927
+ """
928
+ Recursively traverse storage layout dictionary and extract all fields with 'slot' keys.
929
+
930
+ Args:
931
+ storage_layout_dict: The storage layout dictionary to traverse
932
+ type_descriptors_by_name: Type descriptors mapping
933
+ types_field: Types field for annotation
934
+ parent_path: Current path in the hierarchy (for building field labels)
935
+
936
+ Returns:
937
+ List of storage field dictionaries
938
+ """
939
+ storage_fields = []
940
+
941
+ for key, value in storage_layout_dict.items():
942
+ current_path = f"{parent_path}.{key}" if parent_path else key
943
+
944
+ if isinstance(value, dict):
945
+ # Check if this dict contains a 'slot' key (leaf node)
946
+ if 'slot' in value:
947
+ # This is a storage variable - process it
948
+ storage_fields.append({
949
+ 'label': current_path,
950
+ 'slot': str(value['slot']),
951
+ 'offset': 0,
952
+ 'type': value['type'],
953
+ 'descriptor': annotate_desc(
954
+ type_descriptors_by_name[value['type']],
955
+ value['type'],
956
+ types_field
957
+ )
958
+ })
959
+ else:
960
+ # This is a nested structure - recurse into it
961
+ storage_fields.extend(
962
+ extract_storage_fields(value, type_descriptors_by_name, types_field, current_path)
963
+ )
964
+
965
+ return storage_fields
966
+
967
+ storage_field = extract_storage_fields(storage_layout_dict, type_descriptors_by_name, types_field)
968
+
969
+ data_key = file_abs_path if file_abs_path in data["contracts"] else ast_key
970
+ if data_key not in data["contracts"]:
971
+ raise Exception(f"Expected to have the right key into the json out for updating the storage layout, "
972
+ f"tried {file_abs_path} and {ast_key} but keys are {data['contracts'].keys()}")
973
+ contract_name = list(data['contracts'][data_key].keys())[0]
974
+ data['contracts'][data_key][contract_name]['storageLayout'] = {
945
975
  'storage': storage_field,
946
976
  'types': types_field,
947
977
  'storageHashArgsReversed': True
948
978
  }
949
- data['contracts'][file_abs_path][contract_name]['storageHashArgsReversed'] = True
979
+ data['contracts'][data_key][contract_name]['storageHashArgsReversed'] = True
950
980
  return data
951
981
  except Exception as e:
952
982
  ast_logger.warning(f'Failed to get storage layout, continuing: {e}')
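A simplified, self-contained sketch of the recursion above (descriptor annotation omitted, layout values hypothetical): nested layout sections are flattened into label/slot/type entries, with nesting joined by dots:

    from typing import Any, Dict, List

    def flatten_storage(layout: Dict[str, Any], parent: str = "") -> List[Dict[str, Any]]:
        fields: List[Dict[str, Any]] = []
        for key, value in layout.items():
            path = f"{parent}.{key}" if parent else key
            if isinstance(value, dict):
                if 'slot' in value:    # leaf node: an actual storage variable
                    fields.append({'label': path, 'slot': str(value['slot']), 'type': value['type']})
                else:                  # nested section (e.g. a module): recurse
                    fields.extend(flatten_storage(value, path))
        return fields

    layout = {
        'owner':  {'slot': 0, 'type': 'address'},
        'my_mod': {'counter': {'slot': 1, 'type': 'uint256'}},
    }
    print(flatten_storage(layout))
    # [{'label': 'owner', 'slot': '0', 'type': 'address'},
    #  {'label': 'my_mod.counter', 'slot': '1', 'type': 'uint256'}]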
@@ -1175,7 +1205,7 @@ class CompilerLangVy(CompilerLang, metaclass=Singleton):
1175
1205
  return funcs
1176
1206
 
1177
1207
  vyper_types, public_vardecls = \
1178
- CompilerLangVy.extract_ast_types_and_public_vardecls(asts[build_arg_contract_file][contract_file])
1208
+ CompilerLangVy.extract_ast_types_and_public_vardecls(asts[build_arg_contract_file])
1179
1209
  ct_types = [x.get_certora_type(contract_name, 0) for x in vyper_types]
1180
1210
  getter_vars_list = [(v, public_vardecls[v].get_certora_type(contract_name, 0))
1181
1211
  for v in public_vardecls if isinstance(public_vardecls[v], CompilerLangVy.VyperTypeHashMap)]
@@ -1208,7 +1238,7 @@ class CompilerLangVy(CompilerLang, metaclass=Singleton):
1208
1238
 
1209
1239
  try:
1210
1240
  # TODO: verify that the collected functions matches the information in data['abi']
1211
- collector = Collector(contract_name, asts[build_arg_contract_file][contract_file])
1241
+ collector = Collector(contract_name, asts[build_arg_contract_file])
1212
1242
  type_descriptions_and_funcs = [t.get_certora_type(contract_name, 0) for t in
1213
1243
  collector.types.values()], collector.funcs
1214
1244
 
@@ -1249,17 +1279,24 @@ class Collector:
1249
1279
 
1250
1280
  _contract_name : str
1251
1281
 
1252
- def __init__(self, contract_name : str, asts: Dict[int, Dict[str, Any]]):
1282
+ def __init__(self, contract_name : str, asts_per_file: Dict[str, Dict[int, Dict[str, Any]]]):
1253
1283
  """Collect the types and functions from the top-level 'AST' node in [ast]."""
1254
1284
  self.types = {}
1255
1285
  self.funcs = []
1256
1286
  self.consts = {}
1257
1287
  self._contract_name = contract_name
1258
- for node in asts.values():
1259
- if node['ast_type'] == 'Module':
1260
- self._collect_module(node)
1261
-
1262
- def _collect_module(self, module_node: Dict[str, Any]) -> None:
1288
+ # first pass - get all constants
1289
+ for _, asts in asts_per_file.items():
1290
+ for node in asts.values():
1291
+ if node['ast_type'] == 'Module':
1292
+ self._collect_module_consts(node)
1293
+
1294
+ for _, asts in asts_per_file.items():
1295
+ for node in asts.values():
1296
+ if node['ast_type'] == 'Module':
1297
+ self._collect_module(node)
1298
+
1299
+ def _collect_module_consts(self, module_node: Dict[str, Any]) -> None:
1263
1300
  """Populate [self.types] and [self.funcs] base on 'Module' AST node in [node]."""
1264
1301
  assert module_node['ast_type'] == "Module"
1265
1302
 
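A toy model (not the real Collector) of why constants are gathered in a separate first pass: a constant declared in one module may be referenced, e.g. as a fixed array size, in another module that is visited earlier:

    from typing import Dict

    modules = {
        'main.vy': {'consts': {},                'arrays': {'users': 'MAX_USERS'}},
        'lib.vy':  {'consts': {'MAX_USERS': 8},  'arrays': {}},
    }

    # pass 1: collect every named constant across all modules
    consts: Dict[str, int] = {}
    for mod in modules.values():
        consts.update(mod['consts'])

    # pass 2: resolve uses (here, array sizes) now that every constant is known
    sizes = {name: consts[ref] for mod in modules.values() for name, ref in mod['arrays'].items()}
    assert sizes == {'users': 8}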
@@ -1272,6 +1309,7 @@ class Collector:
1272
1309
  for v in var_decls:
1273
1310
  self._collect_const(v)
1274
1311
 
1312
+ def _collect_module(self, module_node: Dict[str, Any]) -> None:
1275
1313
  # Extract and resolve types
1276
1314
  type_asts = {'EnumDef', 'StructDef', 'InterfaceDef', 'Import', 'Import', 'ImportFrom', 'FlagDef'}
1277
1315
  types = [e for e in module_node['body'] if e['ast_type'] in type_asts]
@@ -1286,6 +1324,7 @@ class Collector:
1286
1324
  for f in funs:
1287
1325
  self._collect_func(f)
1288
1326
 
1327
+ var_decls = [e for e in module_node['body'] if e['ast_type'] == 'VariableDecl']
1289
1328
  # Add getters for public variables (also needs to happen after type resolution)
1290
1329
  for v in var_decls:
1291
1330
  self._collect_getter(v)
@@ -1308,7 +1347,16 @@ class Collector:
1308
1347
  # TODO: this is probably wrong, since you can probably import constants and things too...
1309
1348
  # although in practice it appears that people only import constants
1310
1349
  elif type_decl_node['ast_type'] in ('InterfaceDef', 'Import', 'ImportFrom'):
1311
- vy_type = CompilerLangVy.VyperTypeContract(type_decl_node['name'])
1350
+ if "names" in type_decl_node:
1351
+ n_list = type_decl_node["names"]
1352
+ for t in n_list:
1353
+ ty = CompilerLangVy.VyperTypeContract(t["name"])
1354
+ self.types[t["name"]] = ty
1355
+ return
1356
+ elif "name" in type_decl_node:
1357
+ vy_type = CompilerLangVy.VyperTypeContract(type_decl_node['name'])
1358
+ else:
1359
+ raise AssertionError("Unexpected type definition")
1312
1360
  else:
1313
1361
  raise AssertionError("Unexpected type definition")
1314
1362
  self.types[type_decl_node['name']] = vy_type
@@ -1386,6 +1434,11 @@ class CompilerCollectorVy(CompilerCollector):
1386
1434
  def compiler_version(self) -> CompilerVersion:
1387
1435
  return self.__compiler_version
1388
1436
 
1437
+ def normalize_file_compiler_path_name(self, file_abs_path: str) -> str:
1438
+ if self.compiler_version[1] < 4 and not file_abs_path.startswith('/'):
1439
+ return '/' + file_abs_path
1440
+ return file_abs_path
1441
+
1389
1442
  @staticmethod
1390
1443
  def supports_storage_layout(version: CompilerVersion) -> bool:
1391
1444
  return (version[1] > 2 or (
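A stand-alone sketch of the version-dependent normalization introduced above, with a plain function standing in for the collector: Vyper versions below 0.4 get a leading slash prepended, 0.4 and later leave the path untouched:

    def normalize(compiler_version: tuple, file_abs_path: str) -> str:
        if compiler_version[1] < 4 and not file_abs_path.startswith('/'):
            return '/' + file_abs_path
        return file_abs_path

    assert normalize((0, 3, 10), 'contracts/Token.vy') == '/contracts/Token.vy'
    assert normalize((0, 4, 1),  'contracts/Token.vy') == 'contracts/Token.vy'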
@@ -15,10 +15,11 @@
15
15
 
16
16
 
17
17
  from dataclasses import dataclass
18
- from typing import Dict, Any, Optional, Generator
18
+ from typing import Dict, Any, Optional, Callable, Generator
19
19
 
20
20
  from CertoraProver.Compiler.CompilerCollectorSol import CompilerCollectorSol
21
21
  from CertoraProver.certoraBuildDataClasses import SDC, Instrumentation, Replace
22
+ from CertoraProver.certoraOffsetConverter import OffsetConverter
22
23
  from Shared import certoraUtils as Util
23
24
 
24
25
 
@@ -92,7 +93,7 @@ def find_casts(ast: Dict[int, Any]) -> list[CastInfo]:
92
93
  function_nodes = [node for node in ast.values() if node.get('nodeType') == 'FunctionDefinition']
93
94
 
94
95
  for func in function_nodes:
95
- for node in iter_all_nodes(func):
96
+ for node in iter_all_nodes_under(func):
96
97
  if isinstance(node, dict) and node.get("kind") == "typeConversion":
97
98
  arguments = node.get("arguments", [])
98
99
  if len(arguments) == 1 and isinstance(arguments[0], dict):
@@ -119,19 +120,21 @@ def casting_func_name(counter: int) -> str:
119
120
  return f"cast_{counter}"
120
121
 
121
122
 
122
- def generate_casting_function(assembly_prefix: str, cast_info: CastInfo, counter: int) -> str:
123
+ def generate_casting_function(assembly_prefix: str, cast_info: CastInfo, counter: int, line: int, column: int) -> str:
123
124
  """
124
125
  returns the text of a solidity function that does casting according to CastInfo. It also has an encoded mload
125
126
  call, to be decoded later on the kotlin side if we run the `safeCasting` builtin rule.
126
127
  """
127
- conversion_string = assembly_prefix + \
128
- "{ mstore(0xffffff6e4604afefe123321beef1b03fffffffffffffffffffff" + \
129
- f'{"%0.4x" % counter}{"%0.4x" % encode_type(cast_info.arg_type_str)}{"%0.4x" % encode_type(cast_info.res_type_str)}, x)' + "}"
128
+ conversion_string = (assembly_prefix +
129
+ "{ mstore(0xffffff6e4604afefe123321beef1b03fffffffffffffff" +
130
+ f'{"%0.5x" % line}{"%0.5x" % column}{"%0.4x" % encode_type(cast_info.arg_type_str)}{"%0.4x" % encode_type(cast_info.res_type_str)}, x)'
131
+ "}")
130
132
  function_head = f"function {casting_func_name(counter)}({cast_info.arg_type_str} x) internal pure returns ({cast_info.res_type_str})"
131
133
  return function_head + "{\n" + conversion_string + f"return {cast_info.res_type_str}(x);\n" "}\n"
132
134
 
133
135
 
134
- def generate_casting_instrumentation(asts: Dict[str, Dict[str, Dict[int, Any]]], contract_file: str, sdc: SDC) \
136
+ def generate_casting_instrumentation(asts: Dict[str, Dict[str, Dict[int, Any]]], contract_file: str, sdc: SDC,
137
+ offset_converters: dict[str, OffsetConverter]) \
135
138
  -> tuple[Dict[str, Dict[int, Instrumentation]], Dict[str, tuple[str, list[str]]]]:
136
139
  """
137
140
  Generate instrumentation for integer type casts in Solidity code.
@@ -152,7 +155,7 @@ def generate_casting_instrumentation(asts: Dict[str, Dict[str, Dict[int, Any]]],
152
155
  " when trying to add casting instrumentation")
153
156
  assembly_prefix = sdc.compiler_collector.gen_memory_safe_assembly_prefix()
154
157
 
155
- casting_funcs : Dict[str, tuple[str, list[str]]] = dict()
158
+ casting_funcs: dict[str, tuple[str, list[str]]] = dict()
156
159
  counter = 0
157
160
  original_files = sorted({Util.convert_path_for_solc_import(c.original_file) for c in sdc.contracts})
158
161
  for file_count, solfile in enumerate(original_files, start=1):
@@ -165,28 +168,32 @@ def generate_casting_instrumentation(asts: Dict[str, Dict[str, Dict[int, Any]]],
165
168
  casts = find_casts(curr_file_ast)
166
169
  for cast_info in casts:
167
170
  start_offset, src_len, file = curr_file_ast[cast_info.expr_id]["src"].split(":")
171
+ line, column = offset_converters[solfile].offset_to_line_column(int(start_offset))
168
172
  counter += 1
169
173
  per_file_inst[int(start_offset)] = Instrumentation(expected=bytes(cast_info.res_type_str[0], 'utf-8'),
170
174
  to_ins=f"{libname}.{casting_func_name(counter)}",
171
175
  mut=Replace(len(cast_info.res_type_str)))
172
- new_func = generate_casting_function(assembly_prefix, cast_info, counter)
176
+ new_func = generate_casting_function(assembly_prefix, cast_info, counter, line, column)
173
177
  per_file_casts.append(new_func)
174
178
 
175
179
  return casting_instrumentation, casting_funcs
176
180
 
177
181
 
178
- def iter_all_nodes(node: Any) -> Generator[Any, Optional[Any], None]:
182
+ def iter_all_nodes_under(node: Any, f: Callable[[Any], bool] = lambda node: True, is_inside: bool = False) \
183
+ -> Generator[Any, Optional[Any], None]:
179
184
  """
180
- Yield a node and all its subnodes in depth-first order.
185
+ Yield a node and all its subnodes in depth-first order, but only recursively under nodes where f returns True.
181
186
  Works with dict nodes that may contain nested dicts and lists.
182
187
  """
183
- yield node
188
+ inside = is_inside
189
+ if f(node):
190
+ inside = True
191
+ if inside:
192
+ yield node
184
193
 
185
194
  if isinstance(node, dict):
186
195
  for value in node.values():
187
- if isinstance(value, (dict, list)):
188
- yield from iter_all_nodes(value)
196
+ yield from iter_all_nodes_under(value, f, inside)
189
197
  elif isinstance(node, list):
190
198
  for item in node:
191
- if isinstance(item, (dict, list)):
192
- yield from iter_all_nodes(item)
199
+ yield from iter_all_nodes_under(item, f, inside)
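A self-contained toy version of iter_all_nodes_under showing the predicate semantics: nodes start being yielded only once the walk has entered a node for which the predicate holds, so here only operators under an UncheckedBlock are reported:

    from typing import Any, Callable, Generator, Optional

    def iter_under(node: Any, f: Callable[[Any], bool] = lambda n: True,
                   inside: bool = False) -> Generator[Any, Optional[Any], None]:
        inside = inside or f(node)
        if inside:
            yield node
        if isinstance(node, dict):
            for value in node.values():
                yield from iter_under(value, f, inside)
        elif isinstance(node, list):
            for item in node:
                yield from iter_under(item, f, inside)

    ast = {'nodeType': 'FunctionDefinition',
           'body': [{'nodeType': 'UncheckedBlock', 'statements': [{'operator': '+'}]},
                    {'nodeType': 'Block', 'statements': [{'operator': '-'}]}]}

    def is_unchecked(n: Any) -> bool:
        return isinstance(n, dict) and n.get('nodeType') == 'UncheckedBlock'

    ops = [n['operator'] for n in iter_under(ast, is_unchecked)
           if isinstance(n, dict) and 'operator' in n]
    assert ops == ['+']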
@@ -36,15 +36,17 @@ from CertoraProver.certoraBuildDataClasses import CONTRACTS, ImmutableReference,
36
36
  Instrumentation, InsertBefore, InsertAfter, UnspecializedSourceFinder, instrumentation_logger
37
37
  from CertoraProver.certoraCompilerParameters import SolcParameters
38
38
  from CertoraProver.certoraContractFuncs import Func, InternalFunc, STATEMUT, SourceBytes, VyperMetadata
39
+ from CertoraProver.certoraOffsetConverter import generate_offset_converters
39
40
  from CertoraProver.certoraSourceFinders import add_source_finders
40
41
  from CertoraProver.certoraVerifyGenerator import CertoraVerifyGenerator
42
+ from CertoraProver.uncheckedOverflowInstrumenter import generate_overflow_instrumentation, add_instrumentation
41
43
 
42
44
  scripts_dir_path = Path(__file__).parent.parent.resolve() # containing directory
43
45
  sys.path.insert(0, str(scripts_dir_path))
44
46
  from CertoraProver.Compiler.CompilerCollector import CompilerLang, CompilerCollector
45
47
  from CertoraProver.Compiler.CompilerCollectorSol import CompilerCollectorSol, CompilerLangSol
46
48
  from CertoraProver.Compiler.CompilerCollectorYul import CompilerLangYul, CompilerCollectorYul
47
- from CertoraProver.Compiler.CompilerCollectorVy import CompilerLangVy
49
+ from CertoraProver.Compiler.CompilerCollectorVy import CompilerLangVy, CompilerCollectorVy
48
50
  from CertoraProver.Compiler.CompilerCollectorFactory import CompilerCollectorFactory, \
49
51
  get_relevant_compiler, get_compiler_lang
50
52
  from CertoraProver.certoraNodeFilters import NodeFilters as Nf
@@ -444,7 +446,7 @@ def generate_inline_finder(f: Func, internal_id: int, sym: int, compiler_collect
444
446
  return finder[1]
445
447
 
446
448
 
447
- def convert_pathname_to_posix(json_dict: Dict[str, Any], entry: str, smart_contract_lang: CompilerLang) -> None:
449
+ def convert_pathname_to_posix(json_dict: Dict[str, Any], entry: str, compiler_collector: CompilerCollector) -> None:
448
450
  """
449
451
  assuming the values kept in the entry [entry] inside [json_dict] are path names
450
452
  :param json_dict: dict to iterate on
@@ -453,7 +455,7 @@ def convert_pathname_to_posix(json_dict: Dict[str, Any], entry: str, smart_contr
453
455
  if entry in json_dict:
454
456
  json_dict_posix_paths = {}
455
457
  for file_path in json_dict[entry]:
456
- path_obj = Path(smart_contract_lang.normalize_file_compiler_path_name(file_path))
458
+ path_obj = Path(compiler_collector.normalize_file_compiler_path_name(file_path))
457
459
  if path_obj.is_file():
458
460
  json_dict_posix_paths[path_obj.as_posix()] = json_dict[entry][file_path]
459
461
  else:
@@ -461,7 +463,7 @@ def convert_pathname_to_posix(json_dict: Dict[str, Any], entry: str, smart_contr
461
463
  # protecting against long strings
462
464
  if len(json_dict_str) > 200:
463
465
  json_dict_str = json_dict_str[:200] + '...'
464
- fatal_error(compiler_logger, f"The path of the source file {file_path} "
466
+ fatal_error(compiler_logger, f"The path of the source file {file_path} ({path_obj}) "
465
467
  f"in the standard json file does not exist!\n{json_dict_str} ")
466
468
  json_dict[entry] = json_dict_posix_paths
467
469
 
@@ -1152,7 +1154,12 @@ class CertoraBuildGenerator:
1152
1154
  if file_abs_path.suffix == VY:
1153
1155
  smart_contract_lang: CompilerLang = CompilerLangVy()
1154
1156
  sdc_name = self.file_to_sdc_name[Path(contract_file).absolute()]
1155
- standard_json_data = self.get_standard_json_data(sdc_name, smart_contract_lang)
1157
+ """
1158
+ maintain backward-compatibility,
1159
+ but in reality this equivalence checker (equivChecker.py) should be removed
1160
+ """
1161
+ dummyCompilerCollectorVy = CompilerCollectorVy((0, 3, 10))
1162
+ standard_json_data = self.get_standard_json_data(sdc_name, smart_contract_lang, dummyCompilerCollectorVy)
1156
1163
  abi = standard_json_data[CONTRACTS][str(Path(contract_file).absolute())][contract_name]['abi']
1157
1164
  ast_logger.debug(f"abi is: \n{abi}")
1158
1165
  for f in filter(lambda x: self.is_imported_abi_entry(x), abi):
@@ -1163,11 +1170,14 @@ class CertoraBuildGenerator:
1163
1170
  elif file_abs_path.suffix == SOL:
1164
1171
  smart_contract_lang = CompilerLangSol()
1165
1172
  sdc_name = self.file_to_sdc_name[file_abs_path]
1166
- compilation_path = self.get_compilation_path(sdc_name)
1167
- standard_json_data = self.get_standard_json_data(sdc_name, smart_contract_lang)
1173
+ compilation_path = Path(Util.abs_posix_path(contract_file))
1174
+ compilerCollectorSol = self.compiler_coll_factory.get_compiler_collector(compilation_path)
1175
+ standard_json_data = self.get_standard_json_data(sdc_name, smart_contract_lang, compilerCollectorSol)
1168
1176
  storage_data = smart_contract_lang.collect_storage_layout_info(str(file_abs_path), compilation_path,
1169
1177
  solc, None,
1170
- standard_json_data)
1178
+ standard_json_data,
1179
+ {}, # dummy ast, not used in solc
1180
+ str(contract_file))
1171
1181
  abi = storage_data[CONTRACTS][str(file_abs_path)][contract_name]["abi"]
1172
1182
  ast_logger.debug(f"abi is: \n{abi}")
1173
1183
  for f in filter(lambda x: self.is_imported_abi_entry(x), abi):
@@ -1270,7 +1280,7 @@ class CertoraBuildGenerator:
1270
1280
  else:
1271
1281
  raise RuntimeError(f"failed to get contract bytes for {contract_name} in file {contract_file}")
1272
1282
 
1273
- def get_standard_json_data(self, sdc_name: str, smart_contract_lang: CompilerLang) -> Dict[str, Any]:
1283
+ def get_standard_json_data(self, sdc_name: str, smart_contract_lang: CompilerLang, compiler_collector : CompilerCollector) -> Dict[str, Any]:
1274
1284
  json_file = smart_contract_lang.compilation_output_path(sdc_name)
1275
1285
  process_logger.debug(f"reading standard json data from {json_file}")
1276
1286
  # jira CER_927 - under windows it happens the solc generate wrong
@@ -1278,7 +1288,7 @@ class CertoraBuildGenerator:
1278
1288
  json_dict = Util.read_json_file(json_file)
1279
1289
  entries = [CONTRACTS, "sources"]
1280
1290
  for ent in entries:
1281
- convert_pathname_to_posix(json_dict, ent, smart_contract_lang)
1291
+ convert_pathname_to_posix(json_dict, ent, compiler_collector)
1282
1292
  return json_dict
1283
1293
 
1284
1294
  def cleanup_compiler_outputs(self, sdc_name: str, smart_contract_lang: CompilerLang) -> None:
@@ -1570,6 +1580,9 @@ class CertoraBuildGenerator:
1570
1580
  """
1571
1581
  solc_json_contract_key = os.path.relpath(contract_file_as_provided, compile_wd) if self.context.use_relpaths_for_solc_json else contract_file_posix_abs
1572
1582
  compiler_collector_lang = compiler_collector.smart_contract_lang
1583
+ main_contract_for_output_selection = "*"
1584
+ search_paths_arr = None
1585
+ additional_asts = None
1573
1586
  if compiler_collector_lang == CompilerLangSol() or compiler_collector_lang == CompilerLangYul():
1574
1587
  sources_dict = {str(solc_json_contract_key): {
1575
1588
  "urls": [str(contract_file_posix_abs)]}} # type: Dict[str, Dict[str, Any]]
@@ -1578,28 +1591,59 @@ class CertoraBuildGenerator:
1578
1591
  "evm.bytecode.functionDebugData"]
1579
1592
  ast_selection = ["id", "ast"]
1580
1593
  elif compiler_collector_lang == CompilerLangVy():
1594
+ main_contract_for_output_selection = "*"
1595
+ sources_dict = {}
1581
1596
  with open(contract_file_posix_abs) as f:
1582
- contents = f.read()
1583
- sources_dict = {str(contract_file_posix_abs): {"content": contents}}
1597
+ if self.context.vyper_custom_std_json_in_map and contract_file_as_provided in self.context.vyper_custom_std_json_in_map:
1598
+ """
1599
+ If we're given a custom standard_json, we'll take from it the sources and
1600
+ the search paths.
1601
+ In particular, we will separate between the interfaces (*.vyi) and regular files,
1602
+ so that we could get the ASTs for the regular files (as those are compilation units).
1603
+ This was tested ONLY on a single contract
1604
+ (ask Shelly)
1605
+ and may require refinement
1606
+ if we get more projects.
1607
+ """
1608
+ vyper_custom_std_json_in = self.context.vyper_custom_std_json_in_map[contract_file_as_provided]
1609
+ with open(vyper_custom_std_json_in) as custom:
1610
+ custom_json = json.load(custom)
1611
+ sources_dict = custom_json.get("sources", None)
1612
+ search_paths_arr = custom_json.get("settings", {}).get("search_paths", None)
1613
+ additional_asts = [x for x, _ in sources_dict.items() if x.endswith(".vy")]
1614
+ if not sources_dict:
1615
+ contents = f.read()
1616
+ sources_dict = {str(contract_file_posix_abs): {"content": contents}}
1584
1617
  output_selection = ["abi", "evm.bytecode", "evm.deployedBytecode", "evm.methodIdentifiers"]
1585
1618
  if compiler_collector.compiler_version >= (0, 4, 4):
1586
1619
  output_selection += ["metadata", "evm.deployedBytecode.symbolMap"]
1587
1620
  ast_selection = ["ast"]
1621
+ else:
1622
+ # "non-compilable" language so no need to deal with it
1623
+ fatal_error(compiler_logger, "Expected only Solidity and Vyper as "
1624
+ "languages for which we build a standard-json")
1588
1625
 
1589
1626
  settings_dict: Dict[str, Any] = \
1590
1627
  {
1591
1628
  "remappings": remappings,
1592
1629
  "outputSelection": {
1593
- "*": {
1630
+ main_contract_for_output_selection: {
1594
1631
  "*": output_selection,
1595
1632
  "": ast_selection
1596
1633
  }
1597
1634
  }
1598
1635
  }
1636
+ if search_paths_arr:
1637
+ settings_dict["search_paths"] = search_paths_arr
1638
+ if additional_asts:
1639
+ for p in additional_asts:
1640
+ if p != main_contract_for_output_selection:
1641
+ settings_dict["outputSelection"][p] = {"": ["ast"]}
1599
1642
 
1600
1643
  self._fill_codegen_related_options(Path(contract_file_as_provided), settings_dict, compiler_collector)
1601
1644
 
1602
1645
  result_dict = {"language": compiler_collector_lang.name, "sources": sources_dict, "settings": settings_dict}
1646
+
1603
1647
  # debug_print("Standard json input")
1604
1648
  # debug_print(json.dumps(result_dict, indent=4))
1605
1649
  return result_dict
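A sketch, with hypothetical file names and a trimmed output selection, of how a custom `vyper -f solc_json` standard-json feeds the build: sources and search_paths are taken from it, and every regular *.vy source (but not *.vyi interfaces) additionally gets its own "ast" output selection:

    import json

    custom_json = {
        "sources": {
            "contracts/Main.vy":    {"content": "# ..."},
            "interfaces/IPool.vyi": {"content": "# ..."},
        },
        "settings": {"search_paths": ["interfaces"]},
    }

    sources = custom_json["sources"]
    search_paths = custom_json.get("settings", {}).get("search_paths")
    additional_asts = [p for p in sources if p.endswith(".vy")]

    output_selection = {"*": {"*": ["abi", "evm.bytecode"], "": ["ast"]}}
    for p in additional_asts:
        if p != "*":
            output_selection[p] = {"": ["ast"]}

    print(json.dumps(output_selection, indent=2))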
@@ -1881,7 +1925,7 @@ class CertoraBuildGenerator:
1881
1925
  compiler_input=standard_json_input)
1882
1926
 
1883
1927
  compiler_logger.debug(f"Collecting standard json: {collect_cmd}")
1884
- standard_json_data = self.get_standard_json_data(sdc_name, smart_contract_lang)
1928
+ standard_json_data = self.get_standard_json_data(sdc_name, smart_contract_lang, compiler_collector)
1885
1929
 
1886
1930
  for error in standard_json_data.get("errors", []):
1887
1931
  # is an error not a warning
@@ -1890,8 +1934,10 @@ class CertoraBuildGenerator:
1890
1934
  # 6275 the error code of solc compiler for missing file
1891
1935
  if 'errorCode' in error and error['errorCode'] == '6275':
1892
1936
  print_package_file_note()
1937
+
1938
+ error_msg = error.get("formattedMessage", error.get("message", "[no msg]"))
1893
1939
  friendly_message = f"{compiler_ver_to_run} had an error:\n" \
1894
- f"{error['formattedMessage']}"
1940
+ f"{error_msg}"
1895
1941
  if fail_on_compilation_error:
1896
1942
  raise Util.CertoraUserInputError(friendly_message)
1897
1943
  else:
@@ -1900,16 +1946,20 @@ class CertoraBuildGenerator:
1900
1946
  raise Util.SolcCompilationException(friendly_message)
1901
1947
 
1902
1948
  # load data
1903
- data = \
1904
- smart_contract_lang.collect_storage_layout_info(file_abs_path, compilation_path, compiler_ver_to_run,
1905
- compiler_collector.compiler_version,
1906
- standard_json_data) # Note we collected for just ONE file
1949
+ # In vyper, we first need the ASTs. Then collect_storage_layout_info will add
1950
+ # storage layout keys for Vyper and do nothing for Solidity.
1951
+ data = standard_json_data
1907
1952
  self.check_for_errors_and_warnings(data, fail_on_compilation_error)
1908
1953
  if smart_contract_lang.supports_ast_output:
1909
1954
  self.collect_asts(build_arg_contract_file, data["sources"])
1955
+ data = \
1956
+ smart_contract_lang.collect_storage_layout_info(file_abs_path, compilation_path, compiler_ver_to_run,
1957
+ compiler_collector.compiler_version,
1958
+ data, self.asts.get(build_arg_contract_file, {}),
1959
+ build_arg_contract_file) # Note we collected for just ONE file
1910
1960
 
1911
1961
  contracts_with_libraries = {}
1912
- file_compiler_path = smart_contract_lang.normalize_file_compiler_path_name(file_abs_path)
1962
+ file_compiler_path = compiler_collector.normalize_file_compiler_path_name(file_abs_path)
1913
1963
 
1914
1964
  # But apparently this heavily depends on the Solidity AST format anyway
1915
1965
 
@@ -2263,15 +2313,10 @@ class CertoraBuildGenerator:
2263
2313
  vyper_metadata.venom_via_stack = metadata_func_info['venom_via_stack']
2264
2314
  if metadata_func_info.get('venom_return_via_stack'):
2265
2315
  vyper_metadata.venom_return_via_stack = metadata_func_info['venom_return_via_stack']
2266
- pattern_in_symbol_map = re.compile(fr"{func_name}\(.*\)_runtime$")
2267
- matches = [k for k in symbol_map if pattern_in_symbol_map.search(k)]
2268
- if len(matches) == 0:
2269
- build_logger.warning(f"Could not find symbol map entry for {func_name} probably was inlined")
2316
+ symbol_map_name = metadata_func_info["_ir_identifier"] + "_runtime"
2317
+ if symbol_map_name not in symbol_map:
2270
2318
  continue
2271
- elif len(matches) > 1:
2272
- raise RuntimeError(f"Found multiple matches for {func_name} in symbol map: {matches}")
2273
- else:
2274
- vyper_metadata.runtime_start_pc = symbol_map[matches[0]]
2319
+ vyper_metadata.runtime_start_pc = symbol_map[symbol_map_name]
2275
2320
  internal_func.vyper_metadata = vyper_metadata
2276
2321
 
2277
2322
  def get_contract_in_sdc(self,
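A toy illustration of the lookup change above (the identifier string is hypothetical): the runtime start PC is now read directly via the function's `_ir_identifier` plus the "_runtime" suffix, instead of regex-matching keys of the symbol map:

    metadata_func_info = {"_ir_identifier": "internal 1 transfer(address,uint256)"}
    symbol_map = {"internal 1 transfer(address,uint256)_runtime": 123}

    symbol_map_name = metadata_func_info["_ir_identifier"] + "_runtime"
    runtime_start_pc = symbol_map.get(symbol_map_name)   # None if the function was inlined away
    assert runtime_start_pc == 123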
@@ -3339,14 +3384,35 @@ class CertoraBuildGenerator:
3339
3384
  else:
3340
3385
  added_source_finders = {}
3341
3386
 
3387
+ offset_converters = generate_offset_converters(sdc_pre_finder)
3388
+
3342
3389
  if self.context.safe_casting_builtin:
3343
3390
  try:
3344
- casting_instrumentations, casting_types = generate_casting_instrumentation(self.asts, build_arg_contract_file, sdc_pre_finder)
3391
+ casting_instrumentations, casting_types = generate_casting_instrumentation(self.asts, build_arg_contract_file, sdc_pre_finder, offset_converters)
3345
3392
  except Exception as e:
3346
- instrumentation_logger.warning(
3347
- f"Computing casting instrumentation failed for {build_arg_contract_file}: {e}", exc_info=True)
3348
3393
  casting_instrumentations, casting_types = {}, {}
3349
- instr = CertoraBuildGenerator.merge_dicts_instrumentation(instr, casting_instrumentations)
3394
+ instrumentation_logger.warning(f"Computing casting instrumentation failed for {build_arg_contract_file}: {e}", exc_info=True)
3395
+ else:
3396
+ casting_instrumentations, casting_types = {}, {}
3397
+
3398
+ if self.context.unchecked_overflow_builtin:
3399
+ try:
3400
+ overflow_instrumentations, op_funcs = generate_overflow_instrumentation(self.asts, build_arg_contract_file, sdc_pre_finder, offset_converters)
3401
+ except Exception as e:
3402
+ overflow_instrumentations, op_funcs = {}, {}
3403
+ instrumentation_logger.warning(
3404
+ f"Computing overflow instrumentation failed for {build_arg_contract_file}: {e}", exc_info=True)
3405
+ else:
3406
+ overflow_instrumentations, op_funcs = {}, {}
3407
+
3408
+ for file_name, inst_dict in casting_instrumentations.items():
3409
+ if file_name not in overflow_instrumentations:
3410
+ overflow_instrumentations[file_name] = dict()
3411
+ d = overflow_instrumentations[file_name]
3412
+ for offset, inst in inst_dict.items():
3413
+ add_instrumentation(d, offset, inst)
3414
+
3415
+ instr = CertoraBuildGenerator.merge_dicts_instrumentation(instr, overflow_instrumentations)
3350
3416
 
3351
3417
  abs_build_arg_contract_file = Util.abs_posix_path(build_arg_contract_file)
3352
3418
  if abs_build_arg_contract_file not in instr:
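A toy model of the merge step above, with plain strings standing in for Instrumentation objects: per-file maps from source offset to inserted text are folded together, and entries landing on the same offset are combined rather than overwritten (the shipped code does this with add_instrumentation):

    from typing import Dict

    casting:  Dict[str, Dict[int, str]] = {"A.sol": {10: "Lib.cast_1("}}
    overflow: Dict[str, Dict[int, str]] = {"A.sol": {10: "Ops.op_1(", 25: ")"}}

    for file_name, inst_dict in casting.items():
        target = overflow.setdefault(file_name, {})
        for offset, text in inst_dict.items():
            target[offset] = target.get(offset, "") + text   # simplified add_instrumentation
    assert overflow == {"A.sol": {10: "Ops.op_1(Lib.cast_1(", 25: ")"}}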
@@ -3409,6 +3475,13 @@ class CertoraBuildGenerator:
3409
3475
  output.write(bytes(f, "utf8"))
3410
3476
  output.write(bytes("}\n", "utf8"))
3411
3477
 
3478
+ library_name, funcs = op_funcs.get(contract_file, ("", list()))
3479
+ if len(funcs) > 0:
3480
+ output.write(bytes(f"\nlibrary {library_name}" + "{\n", "utf8"))
3481
+ for f in funcs:
3482
+ output.write(bytes(f, "utf8"))
3483
+ output.write(bytes("}\n", "utf8"))
3484
+
3412
3485
  new_file = self.to_autofinder_file(build_arg_contract_file)
3413
3486
  self.context.file_to_contract[new_file] = self.context.file_to_contract[
3414
3487
  build_arg_contract_file]
@@ -3429,6 +3502,7 @@ class CertoraBuildGenerator:
3429
3502
  f"Compiling {orig_file_name} to expose internal function information and local variables...")
3430
3503
  else:
3431
3504
  Util.print_progress_message(f"Compiling {orig_file_name} to expose internal function information...")
3505
+
3432
3506
  # record what aliases we have created (for the purposes of type canonicalization, the generated autofinder
3433
3507
  # is an alias of the original file)
3434
3508
  for k, v in autofinder_remappings.items():
@@ -326,6 +326,23 @@ class EvmAttributes(AttrUtil.Attributes):
326
326
  )
327
327
  )
328
328
 
329
+ VYPER_CUSTOM_STD_JSON_IN_MAP = AttrUtil.AttributeDefinition(
330
+ arg_type=AttrUtil.AttrArgType.MAP,
331
+ attr_validation_func=Vf.validate_vyper_custom_std_json_in_map,
332
+ help_msg="Supply a base json for getting vyper compiler output, generated by `vyper -f solc_json`, on a per-"
333
+ "contract basis",
334
+ default_desc="It is assumed the standard-json generated by certora-cli will be able to compile the contracts",
335
+ argparse_args={
336
+ "action": AttrUtil.UniqueStore,
337
+ "type": lambda value: Vf.parse_ordered_dict("validate_vyper_custom_std_json_in_map", value)
338
+ },
339
+ affects_build_cache_key=True,
340
+ disables_build_cache=False,
341
+ config_data=AttributeJobConfigData(
342
+ main_section=MainSection.SOLIDITY_COMPILER
343
+ )
344
+ )
345
+
329
346
  SOLC_VIA_IR = AttrUtil.AttributeDefinition(
330
347
  arg_type=AttrUtil.AttrArgType.BOOLEAN,
331
348
  help_msg="Pass the `--via-ir` flag to the Solidity compiler",
@@ -1216,7 +1233,7 @@ class EvmAttributes(AttrUtil.Attributes):
1216
1233
  SAFE_CASTING_BUILTIN = AttrUtil.AttributeDefinition(
1217
1234
  arg_type=AttrUtil.AttrArgType.BOOLEAN,
1218
1235
  help_msg="This needs to be set to true for the safeCasting builtin to work",
1219
- default_desc="This needs to be set to true for the safeCasting builtin to work",
1236
+ default_desc="safeCasting builtin will not run",
1220
1237
  jar_flag='-safeCastingBuiltin',
1221
1238
  argparse_args={
1222
1239
  'action': AttrUtil.STORE_TRUE,
@@ -1226,6 +1243,19 @@ class EvmAttributes(AttrUtil.Attributes):
1226
1243
  disables_build_cache=False,
1227
1244
  )
1228
1245
 
1246
+ UNCHECKED_OVERFLOW_BUILTIN = AttrUtil.AttributeDefinition(
1247
+ arg_type=AttrUtil.AttrArgType.BOOLEAN,
1248
+ help_msg="This needs to be set to true for the uncheckedOverflow builtin to work",
1249
+ default_desc="uncheckedOverflow builtin will not run",
1250
+ jar_flag='-uncheckedOverflowBuiltin',
1251
+ argparse_args={
1252
+ 'action': AttrUtil.STORE_TRUE,
1253
+ 'default': False
1254
+ },
1255
+ affects_build_cache_key=True,
1256
+ disables_build_cache=False,
1257
+ )
1258
+
1229
1259
  @classmethod
1230
1260
  def hide_attributes(cls) -> List[str]:
1231
1261
  # do not show these attributes in the help message
@@ -750,7 +750,14 @@ def check_map_attributes(context: CertoraContext) -> None:
750
750
 
751
751
  none_keys = [k for k, v in file_list.items() if v is False]
752
752
  if none_keys:
753
- raise Util.CertoraUserInputError(f"The following files are not matched in {map_attr_name}: {none_keys}")
753
+ if map_attr_name == Attrs.EvmAttributes.VYPER_CUSTOM_STD_JSON_IN_MAP.name.lower():
754
+ # this new attribute requires special handling in case we combine Solidity files with Vyper 0.4 files
755
+ vy_files_unmatched = [k for k in none_keys if Util.is_vyper_file(k)]
756
+ if vy_files_unmatched:
757
+ raise Util.CertoraUserInputError(
758
+ f"The following vyper files are not matched in {map_attr_name}: {vy_files_unmatched}")
759
+ else:
760
+ raise Util.CertoraUserInputError(f"The following files are not matched in {map_attr_name}: {none_keys}")
754
761
 
755
762
 
756
763
  def check_parametric_contracts(context: CertoraContext) -> None:
@@ -52,7 +52,7 @@ class VyperMetadata:
52
52
  frame_size: Optional[int] = None,
53
53
  frame_start: Optional[int] = None,
54
54
  venom_via_stack: Optional[List[str]] = None,
55
- venom_return_via_stack: bool = False,
55
+ venom_return_via_stack: Optional[bool] = None,
56
56
  runtime_start_pc: Optional[int] = None,
57
57
  ):
58
58
  self.frame_size = frame_size
@@ -0,0 +1,54 @@
1
+ # The Certora Prover
2
+ # Copyright (C) 2025 Certora Ltd.
3
+ #
4
+ # This program is free software: you can redistribute it and/or modify
5
+ # it under the terms of the GNU General Public License as published by
6
+ # the Free Software Foundation, version 3 of the License.
7
+ #
8
+ # This program is distributed in the hope that it will be useful,
9
+ # but WITHOUT ANY WARRANTY; without even the implied warranty of
10
+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11
+ # GNU General Public License for more details.
12
+ #
13
+ # You should have received a copy of the GNU General Public License
14
+ # along with this program. If not, see <https://www.gnu.org/licenses/>.
15
+
16
+
17
+ import bisect
18
+ from pathlib import Path
19
+
20
+ from CertoraProver.certoraBuildDataClasses import SDC
21
+ from Shared import certoraUtils as Util
22
+
23
+
24
+ class OffsetConverter:
25
+ """Holds newline positions for a file to enable offset-to-line-column conversion."""
26
+
27
+ def __init__(self, file: str):
28
+ """Initialize OffsetConverter by reading newline positions from a file."""
29
+ with Path(file).open('rb') as f:
30
+ content = f.read()
31
+ self.newline_positions = [i for i, byte in enumerate(content) if byte == ord(b'\n')]
32
+
33
+ def offset_to_line_column(self, offset: int) -> tuple[int, int]:
34
+ """
35
+ Convert a file offset to line and column number.
36
+
37
+ Args:
38
+ offset: Byte offset in the file
39
+
40
+ Returns:
41
+ Tuple of (line_number, column_number); the line index is 0-based (the number of newlines before the offset), the column is 1-based
42
+ """
43
+ line = bisect.bisect_left(self.newline_positions, offset)
44
+ # Calculate column based on previous newline position
45
+ if line == 0:
46
+ column = offset + 1 # 1-indexed, no previous newline
47
+ else:
48
+ column = offset - self.newline_positions[line - 1] # 1-indexed from newline
49
+ return line, column
50
+
51
+
52
+ def generate_offset_converters(sdc: SDC) -> dict[str, OffsetConverter]:
53
+ original_files = {Util.convert_path_for_solc_import(c.original_file) for c in sdc.contracts}
54
+ return {file: OffsetConverter(file) for file in original_files}
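A usage sketch that replays OffsetConverter's newline-position/bisect logic on a small temporary file; note that the returned line index counts the newlines before the offset, so the first line is 0:

    import bisect, tempfile
    from pathlib import Path

    src = "first line\nsecond line\n"
    tmp = Path(tempfile.mkstemp(suffix=".sol")[1])
    tmp.write_bytes(src.encode())

    newline_positions = [i for i, b in enumerate(tmp.read_bytes()) if b == ord(b'\n')]

    def offset_to_line_column(offset: int) -> tuple[int, int]:
        line = bisect.bisect_left(newline_positions, offset)
        column = offset + 1 if line == 0 else offset - newline_positions[line - 1]
        return line, column

    # Offset 13 is the 'c' of "second": one newline (at index 10) precedes it, so column = 13 - 10.
    assert offset_to_line_column(13) == (1, 3)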
@@ -291,17 +291,17 @@ def is_unsupported_type(type_string: str) -> bool:
291
291
  return type_string not in PrimitiveType.allowed_primitive_type_names
292
292
 
293
293
 
294
- def find_semicolon(filepath: str, offset: int) -> Optional[int]:
294
+ def find_char(filepath: str, offset: int, c : str) -> Optional[int]:
295
295
  """
296
- From given offset, skip whitespace until finding a semicolon.
297
- Returns None if any non-whitespace character other than ';' is encountered.
296
+ given offset, skip whitespace until finding the input character `c`.
297
+ Returns None if any non-whitespace character other than `c` is encountered.
298
298
 
299
299
  Args:
300
300
  filepath: Path to the file
301
- offset: Byte offset where we expect to find ';' or whitespace
301
+ offset: Byte offset where we expect to find 'c' or whitespace
302
302
 
303
303
  Returns:
304
- Offset of the semicolon if found after only whitespace
304
+ Offset of 'c' if found after only whitespace
305
305
  None if any other non-whitespace character is encountered
306
306
  """
307
307
  try:
@@ -312,7 +312,7 @@ def find_semicolon(filepath: str, offset: int) -> Optional[int]:
312
312
  if not char: # EOF
313
313
  return None
314
314
 
315
- if char == ';':
315
+ if char == c:
316
316
  return f.tell() - 1
317
317
 
318
318
  if char not in ' \t\n\r': # not whitespace
@@ -366,7 +366,7 @@ def add_source_finders(asts: Dict[str, Dict[str, Dict[int, Any]]], contract_file
366
366
  # no need to -1 as the source mapping does not include the ';', and we want to find it...
367
367
  # i.e., the end_offset should point at ';'
368
368
  end_offset = int(start_offset) + int(src_len) # this is the original end offset
369
- end_offset_with_semicolon = find_semicolon(solfile, end_offset)
369
+ end_offset_with_semicolon = find_char(solfile, end_offset, ";")
370
370
  if end_offset_with_semicolon is None:
371
371
  # we are dealing with Solidity code with unexpected format, let's skip this assignment
372
372
  continue
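A self-contained sketch of the find_char contract (file content made up): starting at `offset`, whitespace is skipped; the offset of `c` is returned if it is the next non-whitespace character, otherwise None:

    import tempfile
    from pathlib import Path
    from typing import Optional

    def find_char_demo(filepath: str, offset: int, c: str) -> Optional[int]:
        with open(filepath, 'r') as f:
            f.seek(offset)
            while True:
                char = f.read(1)
                if not char:                  # reached EOF without finding c
                    return None
                if char == c:
                    return f.tell() - 1
                if char not in ' \t\n\r':     # some other token came first
                    return None

    path = Path(tempfile.mkstemp(suffix=".sol")[1])
    path.write_bytes(b"a  + b;")
    assert find_char_demo(str(path), 1, "+") == 3      # skips the two spaces after `a`
    assert find_char_demo(str(path), 1, ";") is None   # `+` is hit before any `;`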
@@ -0,0 +1,152 @@
1
+ # The Certora Prover
2
+ # Copyright (C) 2025 Certora Ltd.
3
+ #
4
+ # This program is free software: you can redistribute it and/or modify
5
+ # it under the terms of the GNU General Public License as published by
6
+ # the Free Software Foundation, version 3 of the License.
7
+ #
8
+ # This program is distributed in the hope that it will be useful,
9
+ # but WITHOUT ANY WARRANTY; without even the implied warranty of
10
+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11
+ # GNU General Public License for more details.
12
+ #
13
+ # You should have received a copy of the GNU General Public License
14
+ # along with this program. If not, see <https://www.gnu.org/licenses/>.
15
+ from typing import Any
16
+
17
+ from CertoraProver.Compiler.CompilerCollectorSol import CompilerCollectorSol
18
+ from CertoraProver.castingInstrumenter import encode_type, iter_all_nodes_under
19
+ from CertoraProver.certoraBuildDataClasses import SDC, Instrumentation, Replace, InsertBefore, InsertAfter
20
+ from CertoraProver.certoraOffsetConverter import OffsetConverter
21
+ from CertoraProver.certoraSourceFinders import find_char
22
+ from Shared import certoraUtils as Util
23
+
24
+
25
+ def is_unchecked_block(node: Any) -> bool:
26
+ return isinstance(node, dict) and node.get('nodeType') == 'UncheckedBlock'
27
+
28
+
29
+ def is_possibly_overflowing_op(node: Any) -> bool:
30
+ return isinstance(node, dict) and node.get('operator') in {"*", "+", "-"}
31
+
32
+
33
+ def find_unchecked(ast: dict[int, Any]) -> list[dict]:
34
+ function_nodes = [node for node in ast.values() if node.get('nodeType') == 'FunctionDefinition']
35
+ result = []
36
+ for func in function_nodes:
37
+ for node in iter_all_nodes_under(func, is_unchecked_block):
38
+ if is_possibly_overflowing_op(node):
39
+ result.append(node)
40
+ return result
41
+
42
+
43
+ def func_name(counter: int) -> str:
44
+ return f"op_{counter}"
45
+
46
+
47
+ def char_at(filepath: str, offset: int) -> str:
48
+ with open(filepath, 'r') as f:
49
+ f.seek(offset)
50
+ return f.read(1)
51
+
52
+
53
+ def instrumentations(filename: str, lib_name: str, op: dict, counter: int) -> dict[int, Instrumentation]:
54
+ start_offset, src_len, file = op["src"].split(":")
55
+ left = op["leftExpression"]
56
+ operator = op["operator"]
57
+ result: dict[int, Instrumentation] = {}
58
+
59
+ start_offset_left, src_len_left, _ = left["src"].split(":")
60
+ where = find_char(filename, int(start_offset_left) + int(src_len_left), operator)
61
+ if where is None:
62
+ raise Exception(f"Could not find {start_offset_left}:{src_len_left} in {filename}")
63
+ result[where] = Instrumentation(expected=bytes(operator, 'utf-8'),
64
+ to_ins=",",
65
+ mut=Replace(1))
66
+ before = int(start_offset_left)
67
+ result[before] = Instrumentation(expected=bytes(char_at(filename, before), 'utf-8'),
68
+ to_ins=f"{lib_name}.{func_name(counter)}(",
69
+ mut=InsertBefore())
70
+ after = int(start_offset) + int(src_len)
71
+ result[after] = Instrumentation(expected=bytes(char_at(filename, after), 'utf-8'),
72
+ to_ins=")",
73
+ mut=InsertBefore())
74
+ return result
75
+
76
+
77
+ def generate_overflow_function(offset_converter: OffsetConverter, assembly_prefix: str, op: dict, counter: int) -> str:
78
+ res_type = op["typeDescriptions"]["typeString"]
79
+ function_head = f"function {func_name(counter)}({res_type} x, {res_type} y) internal pure returns ({res_type})"
80
+ start_offset, _, _ = op["src"].split(":")
81
+ _, left_length, _ = op["leftExpression"]["src"].split(":")
82
+ line, column = offset_converter.offset_to_line_column(int(start_offset) + int(left_length))
83
+ encoded = ("0xffffff6e4604afefe123321beef1b04fffffffffffffffffff"
84
+ f"{'%0.5x' % line}{'%0.5x' % column}{'%0.4x' % encode_type(res_type)}")
85
+ return f"""
86
+ {function_head} {{
87
+ unchecked {{
88
+ {res_type} z = x {op['operator']} y;
89
+ {assembly_prefix} {{
90
+ mstore({encoded}, z)
91
+ }}
92
+ return z;
93
+ }}
94
+ }}
95
+ """
96
+
97
+
98
+ def add_instrumentation(inst_dict: dict[int, Instrumentation], k: int, v: Instrumentation) -> None:
99
+ if k in inst_dict:
100
+ old = inst_dict[k]
101
+ if isinstance(old.mut, InsertBefore) and isinstance(v.mut, InsertBefore):
102
+ inst_dict[k] = Instrumentation(expected=old.expected, mut=InsertBefore(),
103
+ to_ins=old.to_ins + v.to_ins)
104
+ elif isinstance(old.mut, InsertAfter) and isinstance(v.mut, InsertAfter):
105
+ inst_dict[k] = Instrumentation(expected=old.expected, mut=InsertAfter(),
106
+ to_ins=old.to_ins + v.to_ins)
107
+ elif isinstance(old.mut, Replace) and isinstance(v.mut, InsertBefore):
108
+ inst_dict[k] = Instrumentation(expected=old.expected, mut=old.mut,
109
+ to_ins=v.to_ins + old.to_ins)
110
+ elif isinstance(old.mut, InsertBefore) and isinstance(v.mut, Replace):
111
+ inst_dict[k] = Instrumentation(expected=old.expected, mut=v.mut,
112
+ to_ins=old.to_ins + v.to_ins)
113
+ else:
114
+ print(f"GOT A PROBLEM at {k} :::: {old} {v}")
115
+ # should warn here.
116
+ inst_dict[k] = v
117
+ else:
118
+ inst_dict[k] = v
119
+
120
+
121
+ def generate_overflow_instrumentation(asts: dict[str, dict[str, dict[int, Any]]], contract_file: str, sdc: SDC,
122
+ offset_converters: dict[str, OffsetConverter]) \
123
+ -> tuple[dict[str, dict[int, Instrumentation]], dict[str, tuple[str, list[str]]]]:
124
+ """
125
+ Generates the instrumentation for uncheckedOverflow builtin rule.
126
+ It replaces each of the possibly overflowing operations: `*, +, -`, with a function call to a new function
127
+ we add in a library in the same file. This function does the exact same operation, but adds an mload instruction
128
+ encoding the location of the operation and the expected resulting type.
129
+ """
130
+ overflow_instrumentation: dict[str, dict[int, Instrumentation]] = dict()
131
+ op_funcs: dict[str, tuple[str, list[str]]] = dict()
132
+ if not isinstance(sdc.compiler_collector, CompilerCollectorSol):
133
+ raise Exception(f"Encountered a compiler collector that is not solc for file {contract_file}"
134
+ " when trying to add casting instrumentation")
135
+ assembly_prefix = sdc.compiler_collector.gen_memory_safe_assembly_prefix()
136
+ counter = 0
137
+
138
+ original_files = sorted({Util.convert_path_for_solc_import(c.original_file) for c in sdc.contracts})
139
+ for file_count, solfile in enumerate(original_files, start=1):
140
+ main_ast = asts[contract_file]
141
+ libname, per_file_funcs = op_funcs.setdefault(solfile, (f"CertoraOverflowLib{file_count}", []))
142
+ curr_file_ast = main_ast.get(solfile, dict())
143
+ per_file_inst = overflow_instrumentation.setdefault(solfile, dict())
144
+
145
+ for op in find_unchecked(curr_file_ast):
146
+ counter += 1
147
+ for k, v in instrumentations(contract_file, libname, op, counter).items():
148
+ add_instrumentation(per_file_inst, k, v)
149
+ new_func = generate_overflow_function(offset_converters[solfile], assembly_prefix, op, counter)
150
+ per_file_funcs.append(new_func)
151
+
152
+ return overflow_instrumentation, op_funcs
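A worked before/after example of the textual rewrite performed by this instrumenter; the library and function names follow the naming scheme in the code, while the Solidity snippet itself is hypothetical:

    # Source text inside an unchecked block, before instrumentation:
    before = "unchecked { z = x + y; }"
    # The instrumenter (1) inserts the library call before the left operand,
    # (2) replaces the `+` operator with `,`, and (3) inserts `)` after the whole expression:
    after = "unchecked { z = CertoraOverflowLib1.op_1(x , y); }"
    # op_1 recomputes `x + y` in its own unchecked block and mstore's a marker word encoding
    # the operation's source line/column and result type, for the Prover to decode later.
    print(before, "->", after)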
@@ -92,6 +92,7 @@ class RunSources(Util.NoValEnum):
92
92
  BENCHMARK = auto()
93
93
  LIGHT_TEST = auto()
94
94
  REPORT = auto()
95
+ TIMEOUTER = auto()
95
96
 
96
97
 
97
98
  class WaitForResultOptions(Util.NoValEnum):
@@ -751,6 +752,36 @@ def validate_compiler_map(args: Dict[str, str]) -> None:
751
752
  "solc/vyper attribute can be used instead")
752
753
 
753
754
 
755
+ def validate_vyper_custom_std_json_in_map(args: Dict[str, str]) -> None:
756
+ """
757
+ Checks that the argument is a dictionary of the form
758
+ <vy_file_1>=<stdjson_in_file_1>,<vy_file_2>=<stdjson_in_file_2>,...
759
+ and that every listed Vyper source file and standard-json input file exists
760
+
761
+ :param args: argument of --vyper_custom_std_json_in
762
+ :raises CertoraUserInputError if the format is wrong
763
+ """
764
+ if not isinstance(args, dict):
765
+ raise Util.CertoraUserInputError(f"vyper custom std json in should be stored as a map (type was {type(args).__name__})")
766
+
767
+ for source_file, json_file in args.items():
768
+ if not Util.is_vyper_file(source_file):
769
+ raise Util.CertoraUserInputError(f"{source_file} does not end with {Util.VY_EXT}")
770
+ source_file_path = Path(source_file)
771
+ if not source_file_path.exists():
772
+ raise Util.CertoraUserInputError(f"Source vyper file {source_file} does not exist")
773
+
774
+ if not Path(json_file).exists():
775
+ raise Util.CertoraUserInputError(f"Custom standard-json input file {json_file} does not exist")
776
+
777
+ values = args.values()
778
+ first = list(values)[0]
779
+
780
+ if all(x == first for x in values):
781
+ validation_logger.warning("All vyper files will be compiled with the same standard-json in. "
782
+ "Usually different standard-json in files are needed")
783
+
784
+
754
785
  def validate_git_hash(git_hash: str) -> str:
755
786
  """
756
787
  Validates that correct input was inserted as a git commit hash. It must be between 1 and 40 hexadecimal digits.
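A sketch of the map shape this validator accepts (paths are hypothetical), matching the documented <vy_file>=<stdjson_in_file>,... form:

    # Each Vyper source file is mapped to the standard-json produced for it by `vyper -f solc_json`.
    vyper_custom_std_json_in_map = {
        "contracts/Pool.vy":  "build/pool_solc.json",
        "contracts/Token.vy": "build/token_solc.json",
    }
    # The validator checks that every key ends with the Vyper extension and that both the
    # source file and the referenced standard-json input file exist on disk.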
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.2
2
2
  Name: certora-cli-beta-mirror
3
- Version: 8.6.3
3
+ Version: 8.7.0
4
4
  Summary: Runner for the Certora Prover
5
5
  Home-page: https://pypi.org/project/certora-cli-beta-mirror
6
6
  Author: Certora
@@ -38,4 +38,4 @@ Dynamic: requires-dist
38
38
  Dynamic: requires-python
39
39
  Dynamic: summary
40
40
 
41
- Commit c4459c1. Build and Run scripts for executing the Certora Prover on Solidity smart contracts.
41
+ Commit 70ed750. Build and Run scripts for executing the Certora Prover on Solidity smart contracts.
@@ -12,9 +12,9 @@ certora_cli/certoraSorobanProver.py,sha256=SYJKz5Sw-N0bJrSa1njRCE53R9_PMz7IWLhfa
12
12
  certora_cli/certoraSuiProver.py,sha256=gRs-iihB35ZSEIQ5-hJN-wLgrHZlcfmpk76Wr6vza74,2827
13
13
  certora_cli/rustMutator.py,sha256=j1YdY5HOWQcRNbz7TOxp5c79J0YaaYHtWZ6m2mSTeGc,14865
14
14
  certora_cli/CertoraProver/__init__.py,sha256=QHNr-PJQAoyuPgTkO7gg23GRchiWSXglWNG7yLSQZvs,849
15
- certora_cli/CertoraProver/castingInstrumenter.py,sha256=4FDjQjnN9s8I3B9J-_G9rug9Jf3LnWnAuRngr50oTH4,7898
15
+ certora_cli/CertoraProver/castingInstrumenter.py,sha256=maG3J0G2Bb8uT3_xA-AOo0gpBKmwhRJcEqvORD4JwNI,8412
16
16
  certora_cli/CertoraProver/certoraApp.py,sha256=7pkQnUYMzP9V-Yo0jszNV9Y9aB760Q6hMZtlcXuuRp8,1660
17
- certora_cli/CertoraProver/certoraBuild.py,sha256=h6lGjUyTcC0EnRCK1DXD26Qez4DEphpFjx_IoQRiuO8,231608
17
+ certora_cli/CertoraProver/certoraBuild.py,sha256=Nc7EUrxI9w1qTAhmryZ93oIuCLx4X_8UGj22C5mQHPk,235997
18
18
  certora_cli/CertoraProver/certoraBuildCacheManager.py,sha256=DnVd7w92xjmg0DIrMgoJnCvaU0yCz7ySy0M4RiHEXEM,13648
19
19
  certora_cli/CertoraProver/certoraBuildDataClasses.py,sha256=RGeoZCS4OztfbEUak0Oq02wIPgrmAOwLBad6-_XvX8c,14936
20
20
  certora_cli/CertoraProver/certoraBuildRust.py,sha256=ZPbNp4ttRmzcKhFsgHSiHDRExNPaLOzgxTRqu23o1D0,6061
@@ -25,27 +25,29 @@ certora_cli/CertoraProver/certoraCollectRunMetadata.py,sha256=wjP6m8W2rBka15vcj4
25
25
  certora_cli/CertoraProver/certoraCompilerParameters.py,sha256=v-MIt4sDJSQ7vpEFmCt9XBkKBhdBbZuZA4PWNErJwcU,1807
26
26
  certora_cli/CertoraProver/certoraConfigIO.py,sha256=-1EhJYsiheYvyCgOOWrRCQBjqtqNXrpMKJYRq5cKJ0Y,8171
27
27
  certora_cli/CertoraProver/certoraContext.py,sha256=ezdLmR-tSDLTtokhTyS9DacnB3W64Ke_6e714Vuug3c,29196
28
- certora_cli/CertoraProver/certoraContextAttributes.py,sha256=3NCjyNAkEI9ZVQD1BsNBoyHDeAYSbWWBN1gKkj-PqsA,73180
28
+ certora_cli/CertoraProver/certoraContextAttributes.py,sha256=3LDHlGcSm44CkuuAmp9xk7juF3-aYyGpKi6kaFRmIP4,74469
29
29
  certora_cli/CertoraProver/certoraContextClass.py,sha256=d7HDqM72K7YnswR7kEcAHGwkFNrTqRz5-_0m7cl2Mso,900
30
- certora_cli/CertoraProver/certoraContextValidator.py,sha256=II5xHZH5j2LIZ7XP08uybzAZ-HcW778qXcS7qhMAyv8,46901
31
- certora_cli/CertoraProver/certoraContractFuncs.py,sha256=IKe4xbDh0yFoYVCuLeAxnGg9h59rQCla3Qpy0GwS3HE,8236
30
+ certora_cli/CertoraProver/certoraContextValidator.py,sha256=_dIga4q2r2_q4Nk9T9dolfHfRbeKpMpJRZx0KR0zdI0,47415
31
+ certora_cli/CertoraProver/certoraContractFuncs.py,sha256=1CAA7Y9OEmdV-8Qlwk3SxEuOPBd15cd4T5lSv4NPYY8,8245
32
32
  certora_cli/CertoraProver/certoraExtensionInfo.py,sha256=YlShzdoqJQgXXj3r0TJ3fir1KntIR99Rk8JN5qii2lk,2026
33
33
  certora_cli/CertoraProver/certoraJobList.py,sha256=FBIYgJ60I0Ok7vchfTbcuJJbiXgnfAhrONoVeZoHti4,11464
34
34
  certora_cli/CertoraProver/certoraMiniSpecParser.py,sha256=NjjMwf5Rav3YWpoOJh4PZ-QOS8exC2cg4yIBSbZA6l0,9660
35
35
  certora_cli/CertoraProver/certoraNodeFilters.py,sha256=5Uk2mixZKeis_JVd3HkLgoEVklkAYBXAZiNHRlXOIfY,2830
36
+ certora_cli/CertoraProver/certoraOffsetConverter.py,sha256=Z3ENmJQJpbBhb_Jyb9s-z27f4bV9ABzXrDweBySRWjs,2103
36
37
  certora_cli/CertoraProver/certoraParseBuildScript.py,sha256=l7KQA1poEjmbmuYbMskz0jOQg6cW0lM3vk5ruAGPjPI,4863
37
38
  certora_cli/CertoraProver/certoraProjectScanner.py,sha256=jT7FeWzcy8o83LrZRwsg_L4x6im6Fm_0LZFKVbKr3Jk,6862
38
- certora_cli/CertoraProver/certoraSourceFinders.py,sha256=qwJtwrQq3NUNYmdmn1UmANN4lmJFIUh4M-St2x1FJ2Y,19038
39
+ certora_cli/CertoraProver/certoraSourceFinders.py,sha256=UwM4ROH-QML1mD0eF_dKRZysL__CxwM-6mxhRFsXCfg,19037
39
40
  certora_cli/CertoraProver/certoraType.py,sha256=inwaLkMVwtJnwkyQhDJs-wRxoyytu2Xa_BJ5MdGlZqY,29737
40
41
  certora_cli/CertoraProver/certoraVerifyGenerator.py,sha256=YMuzGj2RNOnADOx8UnV2ys1ptw_-2mermgC9ZLMWceo,11052
41
42
  certora_cli/CertoraProver/erc7201.py,sha256=BME5kBZsDx6lgqLn7EE91I1cEOZtsnZ8BlRVF62eEBE,1660
42
43
  certora_cli/CertoraProver/splitRules.py,sha256=dNhy05ShB_-rWYTnJH5m-Xc5A4HGStAvwLRs1BTu1GA,7627
43
44
  certora_cli/CertoraProver/storageExtension.py,sha256=nrCrbH8ne-yCYSDFzh3J9A7Q6h96WxhEfLbfxGSUCSc,14363
44
- certora_cli/CertoraProver/Compiler/CompilerCollector.py,sha256=cr-PIl7LY9VfNs4s4H3-EnSnomPiCgXudfwP9-KenMk,6740
45
+ certora_cli/CertoraProver/uncheckedOverflowInstrumenter.py,sha256=0zgpQR0b08tdllI2kVwWMEIcsjer0b0L10FFEvTaKm0,7446
46
+ certora_cli/CertoraProver/Compiler/CompilerCollector.py,sha256=JX-52BQgildY2WNlDW8yBM6o6ND4cN5Ih_1t4KTHuvc,6849
45
47
  certora_cli/CertoraProver/Compiler/CompilerCollectorFactory.py,sha256=drMPTUz9cabxqIu2ngGp0x1ZZ_Jqqn-Db2qql97PTaw,8544
46
48
  certora_cli/CertoraProver/Compiler/CompilerCollectorSol.py,sha256=7nAY2FLMUlGJn4f_YoZMqpa3rf7THqhJVjLwTaChcBc,5027
47
49
  certora_cli/CertoraProver/Compiler/CompilerCollectorSolBased.py,sha256=UasYWyu8Of6R84vXsqRNGpscYcFQghmSIY_dyaAWDYA,1350
48
- certora_cli/CertoraProver/Compiler/CompilerCollectorVy.py,sha256=kPzB_qbSbIFNmJd2cQ89ULwvgiGs7OdI9N_w9raaM4Y,69981
50
+ certora_cli/CertoraProver/Compiler/CompilerCollectorVy.py,sha256=F7BoSVE8SzKh0uID6Vqs_osPRvKK1vqsxavmpalyVUo,72953
49
51
  certora_cli/CertoraProver/Compiler/CompilerCollectorYul.py,sha256=ZTyWIMtaf4gLPRM3_jjq58Gb0r5LE_giajz6sssIi0Y,5299
50
52
  certora_cli/CertoraProver/Compiler/__init__.py,sha256=tEFAmNyx9WL0kzpp_-4s7b6pLvxHmBWz6pQAq0yeROM,789
51
53
  certora_cli/EquivalenceCheck/Eq_default.conf,sha256=J-tMFzIuQa1NcklOh-wv2bpnikfAxFogpRFOl3qoSwM,164
@@ -67,15 +69,15 @@ certora_cli/Shared/__init__.py,sha256=s0dhvolFtsS4sRNzPVhC_rlw8mm194rCZ0WhOxInY4
67
69
  certora_cli/Shared/certoraAttrUtil.py,sha256=Nw8ban5Axp6c6dT-KJfCD9i9tKnGk1DbvRDDNH3--DU,8574
68
70
  certora_cli/Shared/certoraLogging.py,sha256=cV2UQMhQ5j8crGXgeq9CEamI-Lk4HgdiA3HCrP-kSR4,14013
69
71
  certora_cli/Shared/certoraUtils.py,sha256=WEvgDMqTeqC61Q0cW1inbTmGVEgFYawjq8H2vv26bmI,60574
70
- certora_cli/Shared/certoraValidateFuncs.py,sha256=gCM-YP0Tpngpasd2AWxhu90UNz1wtls3WqJYp18n_Q8,43503
72
+ certora_cli/Shared/certoraValidateFuncs.py,sha256=7m1cZVVeN1UaGdPtvjxDJBlDQF0Oq3gIkSexQsHKxuU,44893
71
73
  certora_cli/Shared/proverCommon.py,sha256=DUB-uEKjOkZ-8qil6xukPqfTynpigXW-gcrm0_kRUZY,11383
72
- certora_jars/ASTExtraction.jar,sha256=WW6yieuw0g2v_obN8ta_UNCo-s36aXSTalGrNlVcVlk,22010491
73
- certora_jars/CERTORA-CLI-VERSION-METADATA.json,sha256=X52jjlrZ6HZa4glSelbBDKPXahSrm2zQLw8TGtVuCHU,144
74
- certora_jars/Typechecker.jar,sha256=kCONcIoiI34Pn0MQn_cBI4Jvpxm41o0gi7syqLDWVok,21972603
74
+ certora_jars/ASTExtraction.jar,sha256=8iuLcI3Qm-SktYtxxqbMlssniudegphVMZz9VEEwMHQ,22265115
75
+ certora_jars/CERTORA-CLI-VERSION-METADATA.json,sha256=YXjQoHr6lx4JGa-7B4sW5gyDOiZkhpov1BtFTzi2R6U,143
76
+ certora_jars/Typechecker.jar,sha256=FrV3Zlx8zA5vqP2k7NGy4BvK1oj9RGqu0jVIjwvIbmE,22227227
75
77
  certora_jars/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
76
- certora_cli_beta_mirror-8.6.3.dist-info/LICENSE,sha256=UGKSKIJSetF8m906JLKqNLkUS2CL60XfQdNvxBvpQXo,620
77
- certora_cli_beta_mirror-8.6.3.dist-info/METADATA,sha256=j_4UsqDWfNyvCbiTBjK_gU_OWAuWPpVkAxxmSy-1d28,1254
78
- certora_cli_beta_mirror-8.6.3.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
79
- certora_cli_beta_mirror-8.6.3.dist-info/entry_points.txt,sha256=YXGQmR4tGdYD9lLdG_TEJkmVNrRauCtCDE88HwvO2Jo,569
80
- certora_cli_beta_mirror-8.6.3.dist-info/top_level.txt,sha256=8C77w3JLanY0-NW45vpJsjRssyCqVP-qmPiN9FjWiX4,38
81
- certora_cli_beta_mirror-8.6.3.dist-info/RECORD,,
78
+ certora_cli_beta_mirror-8.7.0.dist-info/LICENSE,sha256=UGKSKIJSetF8m906JLKqNLkUS2CL60XfQdNvxBvpQXo,620
79
+ certora_cli_beta_mirror-8.7.0.dist-info/METADATA,sha256=5uP-R_JenEwKwuLYnpFbDchACvJ4W8gjK2MIOuC2cbQ,1254
80
+ certora_cli_beta_mirror-8.7.0.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
81
+ certora_cli_beta_mirror-8.7.0.dist-info/entry_points.txt,sha256=YXGQmR4tGdYD9lLdG_TEJkmVNrRauCtCDE88HwvO2Jo,569
82
+ certora_cli_beta_mirror-8.7.0.dist-info/top_level.txt,sha256=8C77w3JLanY0-NW45vpJsjRssyCqVP-qmPiN9FjWiX4,38
83
+ certora_cli_beta_mirror-8.7.0.dist-info/RECORD,,
Binary file
@@ -1 +1 @@
1
- {"name": "certora-cli-beta-mirror", "tag": "8.6.3", "branch": "", "commit": "c4459c1", "timestamp": "20251218.13.53.990761", "version": "8.6.3"}
1
+ {"name": "certora-cli-beta-mirror", "tag": "8.7.0", "branch": "", "commit": "70ed750", "timestamp": "20260121.6.14.752745", "version": "8.7.0"}
Binary file