crosshair-tool 0.0.84-cp311-cp311-win32.whl → 0.0.86-cp311-cp311-win32.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. _crosshair_tracers.cp311-win32.pyd +0 -0
  2. crosshair/__init__.py +1 -1
  3. crosshair/_mark_stacks.h +0 -25
  4. crosshair/_tracers.h +2 -0
  5. crosshair/_tracers_test.py +8 -2
  6. crosshair/condition_parser.py +5 -5
  7. crosshair/condition_parser_test.py +1 -1
  8. crosshair/copyext.py +23 -7
  9. crosshair/copyext_test.py +11 -1
  10. crosshair/dynamic_typing.py +1 -1
  11. crosshair/fnutil_test.py +4 -1
  12. crosshair/libimpl/arraylib.py +0 -13
  13. crosshair/libimpl/builtinslib.py +26 -274
  14. crosshair/libimpl/builtinslib_test.py +1 -1
  15. crosshair/libimpl/collectionslib.py +13 -2
  16. crosshair/libimpl/collectionslib_test.py +10 -2
  17. crosshair/libimpl/timelib.py +34 -15
  18. crosshair/libimpl/timelib_test.py +12 -2
  19. crosshair/libimpl/typeslib_test.py +2 -1
  20. crosshair/lsp_server.py +1 -1
  21. crosshair/opcode_intercept.py +131 -47
  22. crosshair/opcode_intercept_test.py +97 -6
  23. crosshair/statespace.py +9 -4
  24. crosshair/tracers.py +27 -9
  25. crosshair/type_repo.py +2 -2
  26. crosshair/unicode_categories.py +1 -0
  27. crosshair/util.py +48 -17
  28. {crosshair_tool-0.0.84.dist-info → crosshair_tool-0.0.86.dist-info}/METADATA +1 -1
  29. {crosshair_tool-0.0.84.dist-info → crosshair_tool-0.0.86.dist-info}/RECORD +33 -33
  30. {crosshair_tool-0.0.84.dist-info → crosshair_tool-0.0.86.dist-info}/WHEEL +1 -1
  31. {crosshair_tool-0.0.84.dist-info → crosshair_tool-0.0.86.dist-info}/entry_points.txt +0 -0
  32. {crosshair_tool-0.0.84.dist-info → crosshair_tool-0.0.86.dist-info}/licenses/LICENSE +0 -0
  33. {crosshair_tool-0.0.84.dist-info → crosshair_tool-0.0.86.dist-info}/top_level.txt +0 -0
crosshair/libimpl/collectionslib.py CHANGED
@@ -126,8 +126,19 @@ class ListBasedDeque(collections.abc.MutableSequence, CrossHairValue, Generic[T]
         prefix.reverse()
         self._contents = prefix + self._contents
 
-    def index(self, item: T, *bounds) -> int:
-        return self._contents.index(item, *bounds)
+    if sys.version_info >= (3, 14):
+
+        def index(self, item: T, *bounds) -> int:
+            try:
+                return self._contents.index(item, *bounds)
+            except ValueError as exc:
+                exc.args = ("deque.index(x): x not in deque",)
+                raise
+
+    else:
+
+        def index(self, item: T, *bounds) -> int:
+            return self._contents.index(item, *bounds)
 
     def insert(self, index: int, item: T) -> None:
         self._contents.insert(index, item)
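The Python 3.14 branch above keeps delegating to the backing list but rewrites the ValueError message to deque-style wording. A standalone sketch of the same wrap-and-rewrite pattern (the helper name index_like_deque is invented for illustration and is not part of the package):

from typing import List

def index_like_deque(contents: List[int], item: int, *bounds: int) -> int:
    # Delegate to list.index, but normalize the error text so callers that
    # match on the message see the deque-style wording used above.
    try:
        return contents.index(item, *bounds)
    except ValueError as exc:
        exc.args = ("deque.index(x): x not in deque",)
        raise

# index_like_deque([1, 2, 3], 9)  ->  ValueError: deque.index(x): x not in deque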
crosshair/libimpl/collectionslib_test.py CHANGED
@@ -1,3 +1,4 @@
+import re
 import sys
 from collections import Counter, defaultdict, deque, namedtuple
 from copy import deepcopy
@@ -98,7 +99,11 @@ def test_deque_index_with_start_index_throws_correct_exception(test_list) -> Non
     with pytest.raises(ValueError) as context:
         test_list.index(1, 2)
 
-    assert context.match("1 is not in list")
+    if sys.version_info >= (3, 14):
+        # assert context.match(re.escape("list.index(x): x not in list"))
+        assert context.match(re.escape("deque.index(x): x not in deque"))
+    else:
+        assert context.match("1 is not in list")
 
 
 def test_deque_index_with_start_and_end_index(test_list) -> None:
@@ -112,7 +117,10 @@ def test_deque_index_with_start_and_end_index_throws_correct_exception(
     with pytest.raises(ValueError) as context:
         test_list.index(6, 0, 1)
 
-    assert context.match("6 is not in list")
+    if sys.version_info >= (3, 14):
+        assert context.match(re.escape("deque.index(x): x not in deque"))
+    else:
+        assert context.match("6 is not in list")
 
 
 def test_deque_insert(test_list) -> None:
crosshair/libimpl/timelib.py CHANGED
@@ -1,27 +1,45 @@
 import time as real_time
 from inspect import Signature
 from math import isfinite
-from typing import Any, Callable
+from typing import Any, Literal
 
-from crosshair.core import FunctionInterps
+from crosshair.core import register_patch
 from crosshair.register_contract import register_contract
 from crosshair.statespace import context_statespace
 from crosshair.tracers import NoTracing
 
 
-def _gte_last(fn: Callable, value: Any) -> bool:
+class EarliestPossibleTime:
+    monotonic: float = 0.0
+    process_time: float = 0.0
+
+    def __init__(self, *a):
+        pass
+
+
+# Imprecision at high values becomes a sort of artificial problem
+_UNREALISTICALLY_LARGE_TIME_FLOAT = float(60 * 60 * 24 * 365 * 100_000)
+
+
+def _gte_last(kind: Literal["monotonic", "process_time"], value: Any) -> bool:
+    with NoTracing():
+        earliest_times = context_statespace().extra(EarliestPossibleTime)
+        threshold = getattr(earliest_times, kind)
+        setattr(earliest_times, kind, value)
+        return all([threshold <= value, value < _UNREALISTICALLY_LARGE_TIME_FLOAT])
+
+
+def _sleep(value: float) -> None:
     with NoTracing():
-        interps = context_statespace().extra(FunctionInterps)
-        previous = interps._interpretations[fn]
-        if len(previous) < 2:
-            return True
-        return value >= previous[-2]
+        earliest_times = context_statespace().extra(EarliestPossibleTime)
+        earliest_times.monotonic += value
+        return None
 
 
 def make_registrations():
     register_contract(
         real_time.time,
-        post=lambda __return__: __return__ > 0.0,
+        post=lambda __return__: __return__ > 0.0 and isfinite(__return__),
         sig=Signature(parameters=[], return_annotation=float),
     )
     register_contract(
@@ -31,23 +49,24 @@ def make_registrations():
     )
     register_contract(
         real_time.monotonic,
-        post=lambda __return__: isfinite(__return__)
-        and _gte_last(real_time.monotonic, __return__),
+        post=lambda __return__: _gte_last("monotonic", __return__)
+        and isfinite(__return__),
         sig=Signature(parameters=[], return_annotation=float),
     )
     register_contract(
         real_time.monotonic_ns,
-        post=lambda __return__: isfinite(__return__)
-        and _gte_last(real_time.monotonic_ns, __return__),
+        post=lambda __return__: _gte_last("monotonic", __return__ / 1_000_000_000),
         sig=Signature(parameters=[], return_annotation=int),
     )
     register_contract(
         real_time.process_time,
-        post=lambda __return__: _gte_last(real_time.process_time, __return__),
+        post=lambda __return__: _gte_last("process_time", __return__)
+        and isfinite(__return__),
         sig=Signature(parameters=[], return_annotation=float),
     )
     register_contract(
         real_time.process_time_ns,
-        post=lambda __return__: _gte_last(real_time.process_time_ns, __return__),
+        post=lambda __return__: _gte_last("process_time", __return__ / 1_000_000_000),
        sig=Signature(parameters=[], return_annotation=int),
     )
+    register_patch(real_time.sleep, _sleep)
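The rewritten contracts drop the per-function interpretation history and instead keep one per-statespace floor per clock: every reading must be at least the previous one, and the patched sleep only raises the monotonic floor. A rough plain-Python sketch of that bookkeeping (the class and function names here are invented for illustration; no CrossHair internals are involved):

class TimeFloor:
    """Tracks the smallest value the next clock reading is allowed to return."""

    def __init__(self) -> None:
        self.monotonic = 0.0
        self.process_time = 0.0

_UNREALISTICALLY_LARGE = float(60 * 60 * 24 * 365 * 100_000)

def accept_reading(floor: TimeFloor, kind: str, value: float) -> bool:
    # Postcondition-style check: nondecreasing and not absurdly large.
    previous = getattr(floor, kind)
    setattr(floor, kind, value)
    return previous <= value < _UNREALISTICALLY_LARGE

def fake_sleep(floor: TimeFloor, seconds: float) -> None:
    # Sleeping never produces a reading; it only raises the floor.
    floor.monotonic += seconds

floor = TimeFloor()
assert accept_reading(floor, "monotonic", 5.0)
fake_sleep(floor, 60.0)
assert not accept_reading(floor, "monotonic", 10.0)  # went backwards past the floor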
crosshair/libimpl/timelib_test.py CHANGED
@@ -2,7 +2,7 @@ import time
 
 import pytest
 
-from crosshair.statespace import CONFIRMED, POST_FAIL
+from crosshair.statespace import CANNOT_CONFIRM, CONFIRMED, POST_FAIL
 from crosshair.test_util import check_states
 
 
@@ -69,4 +69,14 @@ def test_monotonic_ns():
         start = time.monotonic_ns()
         return time.monotonic_ns() - start
 
-    check_states(f, CONFIRMED)
+    check_states(f, CANNOT_CONFIRM)
+
+
+def test_sleep():
+    def f():
+        """post: _ >= 60.0"""
+        start = time.monotonic()
+        time.sleep(60.01)
+        return time.monotonic() - start
+
+    check_states(f, CANNOT_CONFIRM)
crosshair/libimpl/typeslib_test.py CHANGED
@@ -30,6 +30,7 @@ def test_mappingproxy_deep_realize(space):
     assert type(copy) is MappingProxyType
     with ResumedTracing():
         val_from_orig = orig[key]
-        val_from_copy = copy[key]
+        realized_key = deep_realize(key)
+        val_from_copy = copy[realized_key]
     assert type(val_from_orig) is SymbolicInt
     assert type(val_from_copy) is int
crosshair/lsp_server.py CHANGED
@@ -86,7 +86,7 @@ def publish_messages(
         if message.state < MessageType.PRE_UNSAT:
             continue
         # TODO: consider server.show_message_log()ing the long description
-        diagnostics.append(get_diagnostic(message, doc.lines if doc else ()))
+        diagnostics.append(get_diagnostic(message, doc.lines if doc else []))
     server.publish_diagnostics(uri, diagnostics)
     if not diagnostics:
         # After we publish an empty set, it's safe to forget about the file:
crosshair/opcode_intercept.py CHANGED
@@ -1,10 +1,11 @@
 import dis
 import sys
 import weakref
+from collections import defaultdict
 from collections.abc import MutableMapping, Set
 from sys import version_info
 from types import CodeType, FrameType
-from typing import Callable
+from typing import Any, Callable, Iterable, Mapping, Tuple, Union
 
 from crosshair.core import (
     ATOMIC_IMMUTABLE_TYPES,
@@ -13,23 +14,32 @@ from crosshair.core import (
 )
 from crosshair.libimpl.builtinslib import (
     AnySymbolicStr,
+    AtomicSymbolicValue,
+    ModelingDirector,
     SymbolicBool,
     SymbolicInt,
     SymbolicList,
+    python_types_using_atomic_symbolics,
 )
 from crosshair.simplestructs import LinearSet, ShellMutableSet, SimpleDict, SliceView
 from crosshair.statespace import context_statespace
 from crosshair.tracers import (
     COMPOSITE_TRACER,
     NoTracing,
+    ResumedTracing,
     TracingModule,
     frame_stack_read,
     frame_stack_write,
 )
-from crosshair.util import CrossHairInternal, CrossHairValue
+from crosshair.util import (
+    CROSSHAIR_EXTRA_ASSERTS,
+    CrossHairInternal,
+    CrossHairValue,
+    debug,
+)
 from crosshair.z3util import z3Not, z3Or
 
-BINARY_SUBSCR = dis.opmap["BINARY_SUBSCR"]
+BINARY_SUBSCR = dis.opmap.get("BINARY_SUBSCR", 256)
 BINARY_SLICE = dis.opmap.get("BINARY_SLICE", 256)
 BUILD_STRING = dis.opmap["BUILD_STRING"]
 COMPARE_OP = dis.opmap["COMPARE_OP"]
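BINARY_SUBSCR is now looked up with a fallback because the opcode does not exist on interpreters where subscripting is folded into BINARY_OP; 256 is outside the range of real opcodes, so the sentinel never matches a traced instruction. A quick, version-agnostic way to see which form a given interpreter actually emits (illustrative only):

import dis

# Returns the real opcode where it exists, or the out-of-range sentinel 256 otherwise.
BINARY_SUBSCR = dis.opmap.get("BINARY_SUBSCR", 256)
BINARY_OP = dis.opmap.get("BINARY_OP", 256)
print("BINARY_SUBSCR:", BINARY_SUBSCR, "BINARY_OP:", BINARY_OP)

# Disassembling a subscript shows which instruction your version compiles it to:
dis.dis(lambda container, key: container[key])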
@@ -60,19 +70,119 @@ _DEEPLY_CONCRETE_KEY_TYPES = (
 )
 
 
+class MultiSubscriptableContainer:
+    """Used for indexing a symbolic (non-slice) key into a concrete container"""
+
+    def __init__(self, container: Union[list, tuple, dict]):
+        self.container = container
+
+    def __getitem__(self, key: AtomicSymbolicValue) -> object:
+        with NoTracing():
+            space = context_statespace()
+            container = self.container
+            if isinstance(container, Mapping):
+                kv_pairs: Iterable[Tuple[Any, Any]] = container.items()
+            else:
+                in_bounds = space.smt_fork(
+                    z3Or(-len(container) <= key.var, key.var < len(container)),
+                    desc=f"index_in_bounds",
+                    probability_true=0.9,
+                )
+                if not in_bounds:
+                    raise IndexError
+                kv_pairs = enumerate(container)
+
+            values_by_type = defaultdict(list)
+            values_by_id = {}
+            keys_by_value_id = defaultdict(list)
+            symbolic_for_pytype = space.extra(ModelingDirector).choose
+            for cur_key, cur_value in kv_pairs:
+                if (
+                    isinstance(cur_value, AtomicSymbolicValue)
+                    or type(cur_value) in python_types_using_atomic_symbolics()
+                ):
+                    pytype = (
+                        cur_value._pytype()
+                        if isinstance(cur_value, AtomicSymbolicValue)
+                        else type(cur_value)
+                    )
+                    # Some types like real-based float and symbolic types don't cover all values:
+                    if (
+                        symbolic_for_pytype(pytype)._smt_promote_literal(cur_value)
+                        is not None
+                    ):
+                        values_by_type[pytype].append((cur_key, cur_value))
+                        continue
+                # No symbolics cover this value, but we might still find repeated values:
+                values_by_id[id(cur_value)] = cur_value
+                keys_by_value_id[id(cur_value)].append(cur_key)
+            for value_type, cur_pairs in values_by_type.items():
+                hypothetical_result = symbolic_for_pytype(value_type)(
+                    "item_at_" + space.uniq(), value_type
+                )
+                with ResumedTracing():
+                    condition_pairs = []
+                    for cur_key, cur_val in cur_pairs:
+                        keys_equal = key == cur_key
+                        values_equal = hypothetical_result == cur_val
+                        with NoTracing():
+                            if isinstance(keys_equal, SymbolicBool):
+                                condition_pairs.append((keys_equal, values_equal))
+                            elif keys_equal is False:
+                                pass
+                            else:
+                                # (because the key must be symbolic, we don't ever expect raw True)
+                                raise CrossHairInternal(
+                                    f"key comparison type: {type(keys_equal)} {keys_equal}"
+                                )
+                    if any(keys_equal for keys_equal, _ in condition_pairs):
+                        space.add(any([all(pair) for pair in condition_pairs]))
+                        return hypothetical_result
+
+            for (value_id, value), probability_true in with_uniform_probabilities(
+                values_by_id.items()
+            ):
+                keys_for_value = keys_by_value_id[value_id]
+                with ResumedTracing():
+                    is_match = any([key == k for k in keys_for_value])
+                if isinstance(is_match, SymbolicBool):
+                    if space.smt_fork(
+                        is_match.var,
+                        probability_true=probability_true,
+                    ):
+                        return value
+                elif is_match:
+                    return value
+
+            if type(container) is dict:
+                raise KeyError  # ( f"Key {key} not found in dict")
+            else:
+                raise IndexError  # (f"Index {key} out of range for list/tuple of length {len(container)}")
+
+
 class SymbolicSubscriptInterceptor(TracingModule):
-    opcodes_wanted = frozenset([BINARY_SUBSCR])
+    opcodes_wanted = frozenset([BINARY_SUBSCR, BINARY_OP])
 
     def trace_op(self, frame, codeobj, codenum):
-        # Note that because this is called from inside a Python trace handler, tracing
-        # is automatically disabled, so there's no need for a `with NoTracing():` guard.
+        if codenum == BINARY_OP:
+            oparg = frame_op_arg(frame)
+            if oparg != 26:  # subscript operator, NB_SUBSCR
+                return
+
         key = frame_stack_read(frame, -1)
         if isinstance(key, _DEEPLY_CONCRETE_KEY_TYPES):
             return
         # If we got this far, the index is likely symbolic (or perhaps a slice object)
         container = frame_stack_read(frame, -2)
         container_type = type(container)
-        if container_type is dict:
+        if isinstance(key, AtomicSymbolicValue) and type(container) in (
+            tuple,
+            list,
+            dict,
+        ):
+            wrapped_container = MultiSubscriptableContainer(container)
+            frame_stack_write(frame, -2, wrapped_container)
+        elif container_type is dict:
             # SimpleDict won't hash the keys it's given!
             wrapped_dict = SimpleDict(list(container.items()))
             frame_stack_write(frame, -2, wrapped_dict)
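MultiSubscriptableContainer avoids realizing a symbolic key by introducing a fresh result variable and asserting a disjunction of "key == k and result == v" pairs over the concrete entries. The constraint shape, expressed directly in z3 (a sketch of the idea only, not CrossHair's actual encoding or API):

import z3

# Concrete container {2: 20, 3: 30, 4: 40}, symbolic integer key.
entries = {2: 20, 3: 30, 4: 40}
key = z3.Int("key")
result = z3.Int("item_at_0")  # the fresh "hypothetical_result"

solver = z3.Solver()
# At least one (key == k and result == v) pair must hold.
solver.add(z3.Or(*[z3.And(key == k, result == v) for k, v in entries.items()]))

solver.add(key >= 3)
assert solver.check(result == 40) == z3.sat    # key could still be 4
assert solver.check(result == 20) == z3.unsat  # key == 2 is ruled out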
@@ -84,41 +194,6 @@ class SymbolicSubscriptInterceptor(TracingModule):
             if isinstance(start, SymbolicInt) or isinstance(stop, SymbolicInt):
                 view_wrapper = SliceView(container, 0, len(container))
                 frame_stack_write(frame, -2, SymbolicList(view_wrapper))
-        elif container_type is list or container_type is tuple:
-            if not isinstance(key, SymbolicInt):
-                return
-            # We can't stay symbolic with a concrete list and symbolic numeric index.
-            # But we can make the choice evenly and combine duplicate values, if any.
-
-            space = context_statespace()
-            in_bounds = space.smt_fork(
-                z3Or(-len(container) <= key.var, key.var < len(container)),
-                desc=f"index_in_bounds",
-                probability_true=0.9,
-            )
-            if not in_bounds:
-                return
-            # TODO: `container` should be the same (per path node) on every run;
-            # it would be great to cache this computation somehow.
-            indices = {}
-            for idx, value in enumerate(container):
-                value_id = id(value)
-                if value_id in indices:
-                    indices[value_id].append(idx)
-                else:
-                    indices[value_id] = [idx]
-            for value_id, probability_true in with_uniform_probabilities(
-                indices.keys()
-            ):
-                indices_with_value = indices[value_id]
-                if space.smt_fork(
-                    z3Or(*[key.var == i for i in indices_with_value]),
-                    desc=f"index_to_{'_or_'.join(map(str, indices_with_value))}",
-                    probability_true=probability_true,
-                ):
-                    # avoids realization of `key` in case `container` has duplicates
-                    frame_stack_write(frame, -1, indices_with_value[0])
-                    break
 
 
 class SymbolicSliceInterceptor(TracingModule):
345
420
  # Afterwards, overwrite the interpreter's resulting dict with ours:
346
421
  def post_op():
347
422
  old_dict_obj = frame_stack_read(frame, dict_offset + 2)
348
- if not isinstance(old_dict_obj, (dict, MutableMapping)):
423
+ if CROSSHAIR_EXTRA_ASSERTS and not isinstance(
424
+ old_dict_obj, (dict, MutableMapping)
425
+ ):
349
426
  raise CrossHairInternal("interpreter stack corruption detected")
350
427
  frame_stack_write(frame, dict_offset + 2, dict_obj)
351
428
 
@@ -427,7 +504,8 @@ class SetAddInterceptor(TracingModule):
             # Set and value are concrete; continue as normal.
             return
         # Have the interpreter do a fake addition, namely `set().add(1)`
-        frame_stack_write(frame, set_offset, set())
+        dummy_set: Set = set()
+        frame_stack_write(frame, set_offset, dummy_set)
         frame_stack_write(frame, -1, 1)
 
         # And do our own addition separately:
@@ -435,6 +513,12 @@
 
         # Later, overwrite the interpreter's result with ours:
         def post_op():
+            if CROSSHAIR_EXTRA_ASSERTS:
+                to_replace = frame_stack_read(frame, set_offset + 1)
+                if to_replace is not dummy_set:
+                    raise CrossHairInternal(
+                        f"Found an instance of {type(to_replace)} where dummy set should be."
+                    )
             frame_stack_write(frame, set_offset + 1, set_obj)
 
         COMPOSITE_TRACER.set_postop_callback(post_op, frame)
@@ -450,9 +534,9 @@ class IdentityInterceptor(TracingModule):
     def trace_op(self, frame: FrameType, codeobj: CodeType, codenum: int) -> None:
         arg1 = frame_stack_read(frame, -1)
         arg2 = frame_stack_read(frame, -2)
-        if isinstance(arg1, SymbolicBool):
+        if isinstance(arg1, SymbolicBool) and isinstance(arg2, (bool, SymbolicBool)):
             frame_stack_write(frame, -1, arg1.__ch_realize__())
-        if isinstance(arg2, SymbolicBool):
+        if isinstance(arg2, SymbolicBool) and isinstance(arg1, (bool, SymbolicBool)):
             frame_stack_write(frame, -2, arg2.__ch_realize__())
 
 
@@ -467,7 +551,7 @@ class ModuloInterceptor(TracingModule):
         if isinstance(left, str):
             if codenum == BINARY_OP:
                 oparg = frame_op_arg(frame)
-                if oparg != 6:  # modulo operator (determined experimentally)
+                if oparg != 6:  # modulo operator, NB_REMAINDER
                     return
             frame_stack_write(frame, -2, DeoptimizedPercentFormattingStr(left))
 
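Both interceptors now key off the BINARY_OP argument (26 for NB_SUBSCR on 3.14+, 6 for NB_REMAINDER) instead of dedicated opcodes. Disassembling the corresponding expressions is an easy way to confirm those argument values on a particular interpreter (a sketch; the output varies by Python version):

import dis
import sys

if sys.version_info >= (3, 11):
    # On 3.11+ these compile to BINARY_OP; the oparg column shows the NB_* code.
    dis.dis(lambda a, b: a % b)   # expect BINARY_OP with the remainder argument
    dis.dis(lambda a, b: a[b])    # on 3.14+, expect BINARY_OP with the subscript argument
else:
    dis.dis(lambda a, b: a % b)   # BINARY_MODULO on older versions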
crosshair/opcode_intercept_test.py CHANGED
@@ -1,10 +1,19 @@
+import math
 import sys
+from abc import ABCMeta
 from typing import List, Set
 
 import pytest
 
 from crosshair.core_and_libs import NoTracing, proxy_for_type, standalone_statespace
-from crosshair.statespace import POST_FAIL, MessageType
+from crosshair.libimpl.builtinslib import (
+    ModelingDirector,
+    RealBasedSymbolicFloat,
+    SymbolicBool,
+    SymbolicInt,
+    SymbolicType,
+)
+from crosshair.statespace import POST_FAIL
 from crosshair.test_util import check_states
 from crosshair.tracers import ResumedTracing
 from crosshair.z3util import z3And
@@ -23,7 +32,73 @@ def test_dict_index():
     check_states(numstr, POST_FAIL)
 
 
-def test_concrete_list_with_symbolic_index_deduplicates_values(space):
+def test_dict_index_without_realization(space):
+    class WithMeta(metaclass=ABCMeta):
+        pass
+
+    space.extra(ModelingDirector).global_representations[float] = RealBasedSymbolicFloat
+    a = {
+        -1: WithMeta,
+        # ^ tests regression: isinstance(WithMeta(), type) but type(WithMeta) != type
+        0: list,
+        1.0: 10.0,
+        2: 20,
+        3: 30,
+        4: 40,
+        ("complex", "key"): 50,
+        6: math.inf,
+        7: math.inf,
+    }
+    int_key = proxy_for_type(int, "int_key")
+    int_key2 = proxy_for_type(int, "int_key2")
+    int_key3 = proxy_for_type(int, "int_key3")
+    float_key = RealBasedSymbolicFloat("float_key")
+    float_key2 = RealBasedSymbolicFloat("float_key2")
+    with ResumedTracing():
+        # Try some concrete values out first:
+        assert a[("complex", "key")] == 50
+        assert a[6] == float("inf")
+        try:
+            a[42]
+            assert False, "Expected KeyError for missing key 42"
+        except KeyError:
+            pass
+
+        space.add(2 <= int_key)
+        space.add(int_key <= 4)
+        int_result = a[int_key]
+        assert space.is_possible(int_result == 20)
+        assert space.is_possible(int_result == 40)
+        assert not space.is_possible(int_result == 10)
+        space.add(float_key == 1.0)
+        float_result = a[float_key]
+        assert space.is_possible(float_result == 10.0)
+        assert not space.is_possible(float_result == 42.0)
+        space.add(float_key2 == 2.0)
+        float_result2 = a[float_key2]
+        assert space.is_possible(float_result2 == 20)
+        space.add(int_key2 == 0)
+        int_result2 = a[int_key2]
+        assert int_result2 == list
+        space.add(any([int_key3 == 6, int_key3 == 7]))
+        inf_result = a[int_key3]
+        assert inf_result is math.inf
+    assert isinstance(int_result, SymbolicInt)
+    assert isinstance(float_result, RealBasedSymbolicFloat)
+    assert isinstance(float_result2, SymbolicInt)
+    assert isinstance(int_result2, SymbolicType)
+
+
+def test_dict_symbolic_index_miss(space):
+    a = {6: 60, 7: 70}
+    x = proxy_for_type(int, "x")
+    with ResumedTracing():
+        space.add(x <= 4)
+        with pytest.raises(KeyError):
+            result = a[x]
+
+
+def test_concrete_list_with_symbolic_index_simple(space):
     haystack = [False] * 13 + [True] + [False] * 11
 
     idx = proxy_for_type(int, "idx")
@@ -31,8 +106,12 @@ def test_concrete_list_with_symbolic_index_deduplicates_values(space):
     space.add(0 <= idx)
     space.add(idx < len(haystack))
     ret = haystack[idx]
-    assert ret
-    assert not space.is_possible(idx != 13)
+    assert isinstance(ret, SymbolicBool)
+    with ResumedTracing():
+        assert space.is_possible(idx == 13)
+        assert space.is_possible(idx == 12)
+        space.add(ret)
+        assert not space.is_possible(idx == 12)
 
 
 def test_concrete_list_with_symbolic_index_unhashable_values(space):
@@ -60,7 +139,7 @@ def test_dict_key_containment():
     check_states(numstr, POST_FAIL)
 
 
-def test_dict_comprehension():
+def test_dict_comprehension_basic():
     with standalone_statespace as space:
         with NoTracing():
             x = proxy_for_type(int, "x")
@@ -139,7 +218,7 @@ def test_not_operator_on_non_bool():
         assert notList
 
 
-def test_set_comprehension():
+def test_set_comprehension_basic():
     with standalone_statespace as space:
         with NoTracing():
             x = proxy_for_type(int, "x")
@@ -195,3 +274,15 @@ def test_identity_operator_on_booleans():
             b1 = proxy_for_type(bool, "b1")
         space.add(b1)
         assert b1 is True
+
+
+@pytest.mark.skipif(sys.version_info < (3, 9), reason="IS_OP is new in Python 3.9")
+def test_identity_operator_does_not_realize_on_differing_types():
+    with standalone_statespace as space:
+        with NoTracing():
+            b1 = proxy_for_type(bool, "b1")
+        choices_made_at_start = len(space.choices_made)
+        space.add(b1)
+        fourty_two = 42  # assignment just to avoid lint errors
+        b1 is fourty_two
+        assert len(space.choices_made) == choices_made_at_start
crosshair/statespace.py CHANGED
@@ -743,11 +743,13 @@ class StateSpace:
         model_check_timeout: float,
         search_root: RootNode,
     ):
-        smt_timeout = model_check_timeout * 1000 + 1
         smt_tactic = z3.Tactic("smt")
-        if smt_timeout < 1 << 63:
-            smt_tactic = z3.TryFor(smt_tactic, int(smt_timeout))
         self.solver = smt_tactic.solver()
+        if model_check_timeout < 1 << 63:
+            self.smt_timeout: Optional[int] = int(model_check_timeout * 1000 + 1)
+            self.solver.set(timeout=self.smt_timeout)
+        else:
+            self.smt_timeout = None
         self.solver.set(mbqi=True)
         # turn off every randomization thing we can think of:
         self.solver.set("random-seed", 42)
@@ -1097,7 +1099,10 @@ class StateSpace:
         else:
             # Give ourselves a time extension for deferred assumptions and
             # (likely) counterexample generation to follow.
-            self.execution_deadline += 2.0
+            self.execution_deadline += 4.0
+            if self.smt_timeout is not None:
+                self.smt_timeout = self.smt_timeout * 2
+                self.solver.set(timeout=self.smt_timeout)
         for description, checker in self._deferred_assumptions:
             with ResumedTracing():
                 check_ret = checker()
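The solver budget is now applied through z3's per-solver timeout parameter (and doubled later for deferred assumptions) rather than by wrapping the tactic in z3.TryFor. A minimal z3 sketch of setting and then raising such a budget, independent of the surrounding CrossHair bookkeeping:

import z3

solver = z3.Tactic("smt").solver()
timeout_ms = 500
solver.set(timeout=timeout_ms)   # per-check budget, in milliseconds

x, y = z3.Ints("x y")
solver.add(x * x + y * y == 25, x > 0, y > 0)
if solver.check() == z3.unknown:
    # Grant an extension before retrying, mirroring the doubling above.
    timeout_ms *= 2
    solver.set(timeout=timeout_ms)
print(solver.check())  # sat (e.g. x=3, y=4), well within the budget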
crosshair/tracers.py CHANGED
@@ -132,6 +132,14 @@ def handle_call_function_ex_3_13(frame) -> CallStackInfo:
         return (idx, NULL_POINTER, kwargs_idx)  # type: ignore
 
 
+def handle_call_function_ex_3_14(frame) -> CallStackInfo:
+    callable_idx, kwargs_idx = -4, -1
+    try:
+        return (callable_idx, frame_stack_read(frame, callable_idx), kwargs_idx)
+    except ValueError:
+        return (callable_idx, NULL_POINTER, kwargs_idx)  # type: ignore
+
+
 def handle_call_method(frame) -> CallStackInfo:
     idx = -(frame.f_code.co_code[frame.f_lasti + 1] + 2)
     try:
@@ -148,9 +156,13 @@ _CALL_HANDLERS: Dict[int, Callable[[object], CallStackInfo]] = {
     CALL_KW: handle_call_kw,
     CALL_FUNCTION: handle_call_function,
     CALL_FUNCTION_KW: handle_call_function_kw,
-    CALL_FUNCTION_EX: handle_call_function_ex_3_13
-    if sys.version_info >= (3, 13)
-    else handle_call_function_ex_3_6,
+    CALL_FUNCTION_EX: handle_call_function_ex_3_14
+    if sys.version_info >= (3, 14)
+    else (
+        handle_call_function_ex_3_13
+        if sys.version_info >= (3, 13)
+        else handle_call_function_ex_3_6
+    ),
     CALL_METHOD: handle_call_method,
 }
 
@@ -236,12 +248,18 @@ class TracingModule:
             target = __func
 
         if kwargs_idx is not None:
-            kwargs_dict = frame_stack_read(frame, kwargs_idx)
-            replacement_kwargs = {
-                key.__ch_realize__() if hasattr(key, "__ch_realize__") else key: val
-                for key, val in kwargs_dict.items()
-            }
-            frame_stack_write(frame, kwargs_idx, replacement_kwargs)
+            try:
+                kwargs_dict = frame_stack_read(frame, kwargs_idx)
+            except ValueError:
+                pass
+            else:
+                replacement_kwargs = {
+                    # TODO: I don't think it's safe to realize in the middle of a tracing operation.
+                    # Need to confirm with test. I guess we have to wrap the callable instead?
+                    key.__ch_realize__() if hasattr(key, "__ch_realize__") else key: val
+                    for key, val in kwargs_dict.items()
+                }
+                frame_stack_write(frame, kwargs_idx, replacement_kwargs)
 
         if isinstance(target, Untracable):
             return None
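The new handle_call_function_ex_3_14 assumes the callable sits four slots below the top of the value stack during CALL_FUNCTION_EX, with the kwargs mapping on top. Which instruction a call-with-unpacking compiles to can be checked with dis; the exact stack layout beyond that is a CPython internal that this sketch does not try to assert:

import dis

def forward(fn, args, kwargs):
    # Compiles to CALL_FUNCTION_EX on current CPython versions.
    return fn(*args, **kwargs)

dis.dis(forward)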