umaudemc 0.15.1-py3-none-any.whl → 0.16.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
umaudemc/__init__.py CHANGED
@@ -1 +1 @@
- __version__ = '0.15.1'
+ __version__ = '0.16.0'
umaudemc/__main__.py CHANGED
@@ -316,6 +316,11 @@ def build_parser():
  '--distribute',
  help='Distribute the computation over some machines'
  )
+ parser_scheck.add_argument(
+ '-D',
+ action='append',
+ help='Define a constant to be used in QuaTEx expressions'
+ )
  parser_scheck.add_argument(
  '--format', '-f',
  help='Output format for the simulation results',
@@ -327,6 +332,10 @@ def build_parser():
  help='Plot the results of parametric queries (using Matplotlib)',
  action='store_true'
  )
+ parser_scheck.add_argument(
+ '--dump',
+ help='Dump query evaluations into the given file',
+ )

  parser_scheck.set_defaults(mode='scheck')

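The two new scheck options above rely only on standard argparse behaviour. The following standalone sketch (not part of the package) shows how repeated -D occurrences are collected into a list, which the parse_defines helper introduced further below turns into a dictionary of floats:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('-D', action='append', help='Define a constant to be used in QuaTEx expressions')
parser.add_argument('--dump', help='Dump query evaluations into the given file')

args = parser.parse_args(['-D', 'gamma=0.5', '-D', 'n=100', '--dump', 'raw.txt'])
print(args.D)     # ['gamma=0.5', 'n=100']
print(args.dump)  # raw.txt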
umaudemc/api.py CHANGED
@@ -420,7 +420,7 @@ class MaudeModel:
  return result, stats

  def scheck(self, quatex, assign='uniform', alpha=0.05, delta=0.5, block=30, nsims=(30, None),
- seed=None, jobs=1, usermsgs=_usermsgs, verbose=False):
+ seed=None, jobs=1, usermsgs=_usermsgs, verbose=False, constants=None):
  """
  Statistical model checking of a given QuaTEx expression

@@ -444,15 +444,17 @@ class MaudeModel:
  :type usermsgs: An object with print_error, print_warning and print_info methods
  :param verbose: Enable verbose messages about the simulation state between blocks
  :type verbose: bool
+ :param constants: Constants to be used in the QuaTEx expression
+ :type constants: dict[str, float]
  :returns: the probabilistic model-checking result and a dictionary with statistics
  """

  # Parse the QuaTEx query
  if isinstance(quatex, str) and os.path.exists(quatex):
  with open(quatex) as quatex_file:
- program = _quatex.parse_quatex(quatex_file, filename=quatex)
+ program = _quatex.parse_quatex(quatex_file, filename=quatex, constants=constants)
  else:
- program = _quatex.parse_quatex(quatex)
+ program = _quatex.parse_quatex(quatex, constants=constants)

  if not program:
  return None
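A minimal usage sketch of the extended API (hypothetical model and query files; only the scheck signature comes from the diff above): constants passed here become available to the QuaTEx query as $-prefixed names.

# Assumes 'model' is a MaudeModel already constructed for some probabilistic
# specification and 'prop.quatex' is a query file that reads $limit.
result, stats = model.scheck('prop.quatex', alpha=0.05, delta=0.5,
                             constants={'limit': 10.0})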
umaudemc/command/scheck.py CHANGED
@@ -22,7 +22,10 @@ def show_results(program, nsims, qdata):
  for k, (fname, line, column, params) in enumerate(program.query_locations):
  # Print the query name and location only if there are many
  if program.nqueries > 1:
- print(f'Query {k + 1} ({fname}:{line}:{column})')
+ # If the number of simulation is lower for this query
+ sim_detail = f' ({q.n} simulations)' if q.n != nsims else ''
+
+ print(f'Query {k + 1} ({fname}:{line}:{column}){sim_detail}')

  # For parametric queries, we show the result for every value
  var = params[0] if params else None
@@ -33,6 +36,11 @@ def show_results(program, nsims, qdata):
  else:
  print(f' μ = {q.mu:<25} σ = {q.s:<25} r = {q.h}')

+ # If executions have been discarded
+ if q.discarded:
+ total = q.discarded + q.n
+ print(f' where {q.discarded} executions out of {total} ({round(q.discarded / total * 100, 2)}%) have been discarded')
+
  q = next(qdata_it, None)


@@ -121,6 +129,31 @@ def parse_range(rtext):
  return lims


+ def parse_defines(defines):
+ """Parse constant definitions with -D in the command line"""
+
+ constants = {}
+
+ if not defines:
+ return constants
+
+ for df in defines:
+ if df.count('=') != 1:
+ usermsgs.print_error(f'The argument of -D must be key=value, not {df}. This definition will be ignored.')
+ continue
+
+ key, value = df.strip().split('=')
+
+ try:
+ constants[key] = float(value)
+
+ except ValueError:
+ usermsgs.print_error(f'The value of a -D assignment must be a number, not {value}. This definition will be ignored.')
+ continue
+
+ return constants
+
+
  def scheck(args):
  """Statistical check subcommand"""

@@ -135,8 +168,13 @@ def scheck(args):
  usermsgs.print_error(f'The query file "{args.query}" does not exist.')
  return 1

+ # Parse the constants definitions in the command line
+ constants = parse_defines(args.D)
+
  with open(args.query) as quatex_file:
- program, seen_files = parse_quatex(quatex_file, filename=args.query, legacy=args.assign == 'pmaude')
+ program, seen_files = parse_quatex(quatex_file, filename=args.query,
+ legacy=args.assign == 'pmaude',
+ constants=constants)

  if not program:
  return 1
@@ -167,7 +205,7 @@ def scheck(args):
  if args.distribute:
  from ..distributed import distributed_check

- num_sims, qdata = distributed_check(args, data, min_sim, max_sim, program, seen_files)
+ num_sims, qdata = distributed_check(args, data, min_sim, max_sim, program, constants, seen_files)

  if num_sims is None:
  return 1
@@ -182,7 +220,12 @@ def scheck(args):
  # Call the statistical model checker
  num_sims, qdata = check(program, simulator,
  args.seed, args.alpha, args.delta, args.block,
- min_sim, max_sim, args.jobs, args.verbose)
+ min_sim, max_sim, args.jobs, args.verbose, args.dump)
+
+ # Check the discarded count when there is no convergence
+ for query in qdata:
+ if not query.converged:
+ query.discarded = num_sims - query.n

  # Print the results on the terminal
  (show_json if args.format == 'json' else show_results)(program, num_sims, qdata)
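The -D parsing shown above can be illustrated in isolation; this standalone sketch mirrors its logic (without the usermsgs error reporting): each well-formed key=value definition becomes a float entry, malformed ones are skipped.

def parse_defines_demo(defines):
    constants = {}
    for df in defines or ():
        if df.count('=') != 1:
            continue  # the real code also prints an error message here
        key, value = df.strip().split('=')
        try:
            constants[key] = float(value)
        except ValueError:
            continue  # likewise reported as an error and ignored
    return constants

print(parse_defines_demo(['gamma=0.5', 'n=100', 'bad']))  # {'gamma': 0.5, 'n': 100.0}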
umaudemc/command/sworker.py CHANGED
@@ -71,7 +71,8 @@ class Worker:

  with open(os.path.join(tmp_dir, args.query)) as quatex_file:
  self.program, _ = parse_quatex(quatex_file, filename=args.query,
- legacy=args.assign == 'pmaude')
+ legacy=args.assign == 'pmaude',
+ constants=args.constants)

  if not self.program:
  return False
@@ -92,12 +93,15 @@ class Worker:
  block = self.block

  # Query data
- qdata = [QueryData(k, idict)
+ # (delta, its second argument, does not matter because
+ # convergence is not evaluated by the worker)
+ qdata = [QueryData(k, 1.0, idict)
  for k, qinfo in enumerate(program.query_locations)
  for idict in make_parameter_dicts(qinfo[3])]

  sums = array('d', [0.0] * len(qdata))
  sum_sq = array('d', [0.0] * len(qdata))
+ counts = array('i', [0] * len(qdata))

  while True:

@@ -105,11 +109,13 @@ class Worker:
  # Run the simulation and compute all queries at once
  values = run(program, qdata, simulator)

- for k in range(len(qdata)):
- sums[k] += values[k]
- sum_sq[k] += values[k] * values[k]
+ for k, value in enumerate(values):
+ if value is not None:
+ sums[k] += value
+ sum_sq[k] += value * value
+ counts[k] += 1

- conn.send(b'b' + sums.tobytes() + sum_sq.tobytes())
+ conn.send(b'b' + sums.tobytes() + sum_sq.tobytes() + counts.tobytes())

  # Check whether to continue
  answer = conn.recv(1)
@@ -125,6 +131,7 @@ class Worker:
  for k in range(len(qdata)):
  sums[k] = 0
  sum_sq[k] = 0
+ counts[k] = 0


  def handle_request(message, conn, addr, keep_file):
umaudemc/distributed.py CHANGED
@@ -9,6 +9,7 @@ import random
  import re
  import selectors
  import socket
+ import sys
  import tarfile
  from array import array
  from contextlib import ExitStack
@@ -174,7 +175,7 @@ def process_dspec(dspec, fname):
  return True


- def setup_workers(args, initial_data, dspec, seen_files, stack):
+ def setup_workers(args, initial_data, dspec, constants, seen_files, stack):
  """Setup workers and send problem data"""

  workers = dspec['workers']
@@ -188,7 +189,7 @@ def setup_workers(args, initial_data, dspec, seen_files, stack):
  COPY = ('initial', 'strategy', 'module', 'metamodule', 'opaque', 'full_matchrew',
  'purge_fails', 'merge_states', 'assign', 'block', 'query', 'assign', 'advise', 'verbose')

- data = {key: args.__dict__[key] for key in COPY} | {'file': 'source.maude'}
+ data = {key: args.__dict__[key] for key in COPY} | {'file': 'source.maude', 'constants': constants}

  # Make a flattened version of the Maude file
  flat_source = io.BytesIO()
@@ -200,6 +201,10 @@ def setup_workers(args, initial_data, dspec, seen_files, stack):
  # Save the sockets for each worker
  sockets = []

+ # Root of the QuaTEx sources
+ quatex_root = os.path.commonpath([os.path.dirname(fn) for fn in seen_files])
+ data['query'] = os.path.relpath(data['query'], start=quatex_root)
+
  for worker, seed in zip(workers, seeds):
  address, port = worker['address'], worker['port']

@@ -229,12 +234,8 @@ def setup_workers(args, initial_data, dspec, seen_files, stack):
  tarf.addfile(flat_info, flat_source)

  for file in seen_files:
- relpath = os.path.relpath(file)
-
- if relpath.startswith('..'):
- usermsgs.print_error('QuaTEx file outside the working tree, it will not be included and the execution will fail.')
- else:
- tarf.add(relpath)
+ relpath = os.path.relpath(file, start=quatex_root)
+ tarf.add(file, arcname=relpath)

  fobj.flush()

@@ -248,7 +249,7 @@ def setup_workers(args, initial_data, dspec, seen_files, stack):
  return sockets


- def distributed_check(args, initial_data, min_sim, max_sim, program, seen_files):
+ def distributed_check(args, initial_data, min_sim, max_sim, program, constants, seen_files):
  """Distributed statistical model checking"""

  # Load the distribution specification
@@ -260,10 +261,10 @@ def distributed_check(args, initial_data, min_sim, max_sim, program, seen_files)
  with ExitStack() as stack:

  # Socket to connect with the workers
- if not (sockets := setup_workers(args, initial_data, dspec, seen_files, stack)):
+ if not (sockets := setup_workers(args, initial_data, dspec, constants, seen_files, stack)):
  return None, None

- print('All workers are ready. Starting...')
+ print('All workers are ready. Starting...', file=sys.stderr)

  # Use a selector to wait for updates from any worker
  selector = selectors.DefaultSelector()
@@ -273,9 +274,10 @@ def distributed_check(args, initial_data, min_sim, max_sim, program, seen_files)
  sock.send(b'c')

  buffer = array('d')
+ ibuffer = array('i')

  # Query data
- qdata = [QueryData(k, idict)
+ qdata = [QueryData(k, args.delta, idict)
  for k, qinfo in enumerate(program.query_locations)
  for idict in make_parameter_dicts(qinfo[3])]
  nqueries = len(qdata)
@@ -293,16 +295,19 @@ def distributed_check(args, initial_data, min_sim, max_sim, program, seen_files)
  answer = sock.recv(1)

  if answer == b'b':
- data = sock.recv(16 * nqueries)
- buffer.frombytes(data)
+ data = sock.recv(24 * nqueries)
+ buffer.frombytes(data[:16 * nqueries])
+ ibuffer.frombytes(data[16 * nqueries:])

  for k in range(nqueries):
  qdata[k].sum += buffer[k]
  qdata[k].sum_sq += buffer[nqueries + k]
+ qdata[k].n += ibuffer[k]

  num_sims += key.data['block']

  del buffer[:]
+ del ibuffer[:]
  finished.append(key.fileobj)

  else:
@@ -311,7 +316,7 @@ def distributed_check(args, initial_data, min_sim, max_sim, program, seen_files)
  sockets.remove(key.fileobj)

  # Check whether the simulation has converged
- converged = check_interval(qdata, num_sims, args.alpha, args.delta, quantile, args.verbose)
+ converged = check_interval(qdata, num_sims, min_sim, args.alpha, quantile, args.verbose)

  if converged or max_sim and num_sims >= max_sim:
  break
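The widened worker message decoded above (24 * nqueries, previously 16 * nqueries) can be reproduced with a short standalone sketch: the worker sends a b'b' tag followed by the per-query sums and sums of squares as doubles and the per-query counts as an integer array, and the master splits the payload with array.frombytes.

from array import array

nqueries = 2
sums = array('d', [3.0, 1.5])
sum_sq = array('d', [2.25, 0.75])
counts = array('i', [4, 3])
message = b'b' + sums.tobytes() + sum_sq.tobytes() + counts.tobytes()

# Master side: the two double arrays come first (8 bytes per value), counts afterwards
payload = message[1:]
doubles = array('d')
doubles.frombytes(payload[:16 * nqueries])
ints = array('i')
ints.frombytes(payload[16 * nqueries:])
print(list(doubles), list(ints))  # [3.0, 1.5, 2.25, 0.75] [4, 3]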
umaudemc/pyslang.py CHANGED
@@ -47,6 +47,11 @@ class Instruction:
  def __repr__(self):
  return f'Instruction({self.NAMES[self.type]}, {self.extra})'

+ @property
+ def does_rewrite(self):
+ """Whether the instruction rewrites the subject term"""
+ return self.type in (self.RLAPP, self.RWCNEXT) and self.extra[0]
+

  class StratProgram:
  """Compiled strategy program"""
@@ -506,7 +511,7 @@ class StratCompiler:

  self.generate(next(s.arguments()), p, False)

- initial_jump.extra = (p.pc + 1, initial_pc)
+ initial_jump.extra = (p.pc + (2 if self.use_kleene else 1), initial_pc)
  p.append(Instruction.JUMP, (p.pc + 1, initial_pc))

  if self.use_kleene:
@@ -658,12 +663,20 @@ class StratCompiler:
  def norm(self, s, p, tail):
  # A variation of the conditional and the iteration
  initial_pc = p.pc
+
+ if self.use_kleene:
+ p.append(Instruction.KLEENE, (initial_pc, True))
+
  subsearch = p.append(Instruction.SUBSEARCH)
  self.generate(next(s.arguments()), p, False)
  p.append(Instruction.NOFAIL)
  p.append(Instruction.JUMP, (initial_pc, ))
+
  subsearch.extra = p.pc

+ if self.use_kleene:
+ p.append(Instruction.KLEENE, (initial_pc, True))
+
  def call(self, s, p, tail):
  # Strategy calls are executed by CALL instructions, but the body of
  # their definitions should be generated too. This is done afterwards
@@ -869,9 +882,6 @@ class StratCompiler:
  def __repr__(self):
  return f'BasicBlock({self.start}, {self.length}, {self.has_rewrite}, {self.next}, {self.reachable})'

- def does_rewrite(inst):
- return inst.type == Instruction.RLAPP or inst.type == Instruction.RWCNEXT and inst.extra[0]
-
  # (1) Abstract the code as a graph of blocks linked by jumps

  # Blocks are aggregated by definition (the first entry is the initial expression)
@@ -891,7 +901,7 @@ class StratCompiler:
  inst = p[k]

  # This block contains a rewrite
- if does_rewrite(inst):
+ if inst.does_rewrite:
  current.has_rewrite = True

  # JUMP, CHOICE, SUBSEARCH, and CALL instructions close a block
@@ -1099,7 +1109,7 @@ class StratCompiler:
  # notify_pending is not possible within a rewriting condition

  # There is a RLAPP, so we need to issue a NOTIFY for the previous one
- if does_rewrite(inst):
+ if inst.does_rewrite:
  notify_points.append(k)

  # Failures discard the notification
@@ -1116,7 +1126,7 @@ class StratCompiler:
  notify_points.append(k)
  notify_pending = False

- elif rwc_level == 0 and does_rewrite(inst):
+ elif rwc_level == 0 and inst.does_rewrite:
  notify_pending = True

  for sc in block.next:
umaudemc/quatex.py CHANGED
@@ -144,6 +144,11 @@ class QuaTExLexer:
  self.sline = self.line
  self.scolumn = self.column

+ @staticmethod
+ def _is_name(c):
+ """Whether the character is allowed in a name"""
+ return c.isalnum() or c == '$'
+
  def get_token(self):
  """Get the next token from the stream"""

@@ -166,9 +171,9 @@ class QuaTExLexer:
  self.ltype = self.LT_STRING
  self._capture_string()

- elif c.isalpha():
+ elif c.isalpha() or c == '$':
  self.ltype = self.LT_NAME
- self._capture(str.isalnum)
+ self._capture(self._is_name)

  elif c.isdecimal():
  self.ltype = self.LT_NUMBER
@@ -223,7 +228,7 @@ class QuaTExParser:
  UNARY_OPS = ('!', )
  UNARY_AST = (ast.Not, )

- def __init__(self, source, filename='<stdin>', legacy=False):
+ def __init__(self, source, filename='<stdin>', legacy=False, constants=None):
  # Filename is only used for diagnostics
  self.lexer = QuaTExLexer(source, filename)
  # PMaude legacy syntax
@@ -238,6 +243,8 @@ class QuaTExParser:
  # Whether the variables that may occur
  # in an expression are known
  self.known_vars = True
+ # Constants defined outside
+ self.constants = {} if constants is None else constants

  # State stack for parsing expressions
  self.stack = []
@@ -448,6 +455,13 @@ class QuaTExParser:
  inside_next = True
  call_name, call_line, call_column = token, line, column

+ elif token == 'discard' and not (inside_cond or call_name):
+ if current:
+ self._eprint('misplaced discard keyword.')
+ return None
+
+ current = ast.Constant(None)
+
  elif token == ',':
  if current and self._in_state(self.PS_CALLARGS):
  arg_stack[-1].append(current)
@@ -542,14 +556,20 @@ class QuaTExParser:

  # Simply a variable
  else:
+ current = ast.Name(token, ast.Load())
+
  if not self.known_vars:
  self.fvars.append((token, line, column))

  elif token not in self.fvars:
- self._eprint(f'unknown variable "{token}".', line=line, column=column)
- self.ok = False
+ # The variable is an externally-defined constant
+ if token.startswith('$') and (value := self.constants.get(token[1:])) is not None:
+ current = ast.Constant(value)
+
+ else:
+ self._eprint(f'unknown variable "{token}".', line=line, column=column)
+ self.ok = False

- current = ast.Name(token, ast.Load())

  # We continue with the peeked token
  token = next_token
@@ -846,10 +866,10 @@ class QuaTExParser:
  tuple((fname, line, column, params) for fname, line, column, _, params in self.queries))


- def parse_quatex(input_file, filename='<string>', legacy=False):
+ def parse_quatex(input_file, filename='<string>', legacy=False, constants=None):
  """Parse a QuaTEx formula"""

  # Load, parse, and compile the QuaTEx file
- parser = QuaTExParser(input_file, filename=filename, legacy=legacy)
+ parser = QuaTExParser(input_file, filename=filename, legacy=legacy, constants=constants)

  return parser.parse(), parser.seen_files
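A quick sketch of the new parser entry point (the query file name is hypothetical): constants supplied here replace $-prefixed names in the query at parse time, and, per the new branch above, the discard keyword makes an evaluation yield None so that the run can be excluded from the statistics.

from umaudemc.quatex import parse_quatex

with open('query.quatex') as quatex_file:  # hypothetical query using $p
    program, seen_files = parse_quatex(quatex_file, filename='query.quatex',
                                       constants={'p': 0.25})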
umaudemc/simulators.py CHANGED
@@ -27,7 +27,11 @@ def parse_hole_term(module, term_str):

  # Collect all variables in the term
  varset = set()
- collect_vars(term, varset)
+
+ if term.isVariable():
+ varset.add(term)
+ else:
+ collect_vars(term, varset)

  if len(varset) > 1:
  usermsgs.print_warning('The observation "{message}" '
umaudemc/statistical.py CHANGED
@@ -2,6 +2,7 @@
  # Statistical model-checking engine
  #

+ import contextlib
  import math
  import os
  import random
@@ -40,7 +41,7 @@ def run(program, qdata, simulator):
  # List where to store the results of each query
  results = [None] * len(qdata)
  # Remaining queries for being processed
- remaining = list(range(len(qdata)))
+ remaining = [k for k, q in enumerate(qdata) if not q.converged]

  # Variables when evaluating expressions
  cvars = [{'rval': simulator.rval, **q.params} for q in qdata]
@@ -62,8 +63,16 @@ def run(program, qdata, simulator):
  # Execute the compiled slot
  value = eval(program.slots[pc[k]], cvars[k])

+ # The expression finishes with a call
+ if isinstance(value, tuple):
+ has_next, jump, *args = value
+
+ pc[k] = jump
+ cvars[k] = dict(zip(program.varnames[pc[k]], args),
+ rval=simulator.rval)
+
  # The evaluation of the k-th query has finished
- if isinstance(value, float):
+ else:
  remaining.pop(index)
  results[k] = value

@@ -73,13 +82,6 @@ def run(program, qdata, simulator):


  break
- # The expression finishes with a call
- else:
- has_next, jump, *args = value
-
- pc[k] = jump
- cvars[k] = dict(zip(program.varnames[pc[k]], args),
- rval=simulator.rval)

  if has_next:
  index += 1
@@ -91,11 +93,13 @@ def run(program, qdata, simulator):
  class QueryData:
  """Data associated to a query under evaluation"""

- def __init__(self, query, params):
+ def __init__(self, query, delta, params):
  # Query expression index
  self.query = query
  # Initial dictionary of variable values
  self.params = params
+ # Radius of the confidence interval
+ self.delta = delta

  # Sum of the query outcomes
  self.sum = 0.0
@@ -107,6 +111,13 @@ class QueryData:
  self.s = 0.0
  # Radius of the confidence interval
  self.h = 0.0
+ # Number of runs
+ self.n = 0
+ # Whether the query has converged
+ self.converged = False
+
+ # Number of discarded runs
+ self.discarded = 0


  def make_parameter_dicts(qinfo):
@@ -123,34 +134,47 @@ def make_parameter_dicts(qinfo):
  x += step


- def check_interval(qdata, num_sims, alpha, delta, quantile, verbose):
+ def check_interval(qdata, num_sims, min_sim, alpha, quantile, verbose):
  """Check the confidence interval"""

- # The radius of encloses the confidence level in the reference
- # distribution for calculating confidence intervals
- tinv = quantile(num_sims - 1, 1 - alpha / 2) / math.sqrt(num_sims)
-
  # Whether the size of the confidence interval for all queries have converged
  converged = True

  for query in qdata:
- query.mu = query.sum / num_sims
- query.s = math.sqrt(max(query.sum_sq - query.sum * query.mu, 0.0) / (num_sims - 1))
+ # This query has already converged
+ if query.converged:
+ continue
+ # All executions of this query have been discarded
+ elif query.n == 0:
+ converged = False
+ continue
+
+ # The radius encloses the confidence level in the reference
+ # distribution for calculating confidence intervals
+ tinv = quantile(query.n - 1, 1 - alpha / 2) / math.sqrt(query.n)
+
+ query.mu = query.sum / query.n
+ query.s = math.sqrt(max(query.sum_sq - query.sum * query.mu, 0.0) / (query.n - 1))
  query.h = query.s * tinv

- if query.h > delta:
+ if query.h <= query.delta and query.n >= min_sim:
+ query.converged = True
+ query.discarded = num_sims - query.n
+ else:
  converged = False

  # Print intermediate results if in verbose mode
  if verbose:
- usermsgs.print_info(f' step={num_sims} μ={" ".join(str(q.mu) for q in qdata)}'
+ usermsgs.print_info(f' step={num_sims} n={" ".join(str(q.n) for q in qdata)}'
+ f' μ={" ".join(str(q.mu) for q in qdata)}'
  f' σ={" ".join(str(q.s) for q in qdata)}'
  f' r={" ".join(str(q.h) for q in qdata)}')

  return converged


- def run_single(program, qdata, num_sims, max_sim, simulator, alpha, delta, block_size, verbose=False):
+ def run_single(program, qdata, num_sims, min_sim, max_sim, simulator, alpha, block_size,
+ verbose=False, dump=None):
  """Run simulation in a single thread"""

  # Size of the first block of execution (it coincides with num_sims
@@ -166,11 +190,17 @@ def run_single(program, qdata, num_sims, max_sim, simulator, alpha, delta, block
  # Run the simulation and compute all queries at once
  values = run(program, qdata, simulator)

- for k, query in enumerate(qdata):
- query.sum += values[k]
- query.sum_sq += values[k] * values[k]
+ # Dump evaluations if required
+ if dump:
+ print(*values, file=dump)

- converged = check_interval(qdata, num_sims, alpha, delta, quantile, verbose)
+ for value, query in zip(values, qdata):
+ if value is not None:
+ query.sum += value
+ query.sum_sq += value * value
+ query.n += 1
+
+ converged = check_interval(qdata, num_sims, min_sim, alpha, quantile, verbose)

  if converged or max_sim and num_sims >= max_sim:
  break
@@ -181,7 +211,7 @@ def run_single(program, qdata, num_sims, max_sim, simulator, alpha, delta, block
  return num_sims, qdata


- def thread_main(program, qdata, simulator, num_sims, block_size, seed, queue, barrier, more):
+ def thread_main(program, qdata, simulator, num_sims, block_size, seed, queue, barrier, more, dump=None):
  """Entry point of a calculating thread"""

  maude.setRandomSeed(seed)
@@ -189,8 +219,12 @@ def thread_main(program, qdata, simulator, num_sims, block_size, seed, queue, ba

  block = num_sims

+ # Open dump file for the raw data
+ dump_file = open(dump, 'w') if dump else None
+
  sums = [0.0] * len(qdata)
  sum_sq = [0.0] * len(qdata)
+ counts = [0] * len(qdata)

  # Repeat until the main process says we are done
  while True:
@@ -199,17 +233,23 @@ def thread_main(program, qdata, simulator, num_sims, block_size, seed, queue, ba

  # Run the simulation and compute all queries at once
  values = run(program, qdata, simulator)
+ if dump is not None:
+ print(*values, file=dump_file)
+
  for k in range(len(qdata)):
- sums[k] += values[k]
- sum_sq[k] += values[k] * values[k]
+ if values[k] is not None:
+ sums[k] += values[k]
+ sum_sq[k] += values[k] * values[k]
+ counts[k] += 1

  # Send the results to the main process and wait for it
- queue.put((sums, sum_sq))
+ queue.put((sums, sum_sq, counts))
  barrier.wait()

  for k in range(len(qdata)):
  sums[k] = 0.0
  sum_sq[k] = 0.0
+ counts[k] = 0

  if not more.value:
  break
@@ -218,7 +258,7 @@ def thread_main(program, qdata, simulator, num_sims, block_size, seed, queue, ba
  block = block_size


- def run_parallel(program, qdata, num_sims, max_sim, simulator, alpha, delta, block_size, jobs, verbose=False):
+ def run_parallel(program, qdata, num_sims, min_sim, max_sim, simulator, alpha, block_size, jobs, verbose=False, dump=None):
  """Run the simulation in multiple threads"""
  import multiprocessing as mp
  mp.set_start_method('fork', force=True)
@@ -234,6 +274,8 @@ def run_parallel(program, qdata, num_sims, max_sim, simulator, alpha, delta, blo

  # Random number seeds
  seeds = [random.getrandbits(20) for _ in range(jobs)]
+ # Dump file names
+ dumps = [f'{dump}.{os.getpid()}-{k}' for k in range(jobs)] if dump else ([None] * jobs)
  # Queue for transferring the query evaluations
  queue = mp.Queue()
  barrier = mp.Barrier(jobs + 1)
@@ -243,7 +285,7 @@ def run_parallel(program, qdata, num_sims, max_sim, simulator, alpha, delta, blo
  processes = [mp.Process(target=thread_main,
  args=(program, qdata, simulator, num_sims // jobs + (k < rest),
  block_size // jobs + (k < rest_block),
- seeds[k], queue, barrier, more)) for k in range(jobs)]
+ seeds[k], queue, barrier, more, dumps[k])) for k in range(jobs)]

  # Start all processes
  for p in processes:
@@ -252,13 +294,14 @@ def run_parallel(program, qdata, num_sims, max_sim, simulator, alpha, delta, blo
  # Exactly as in run_single but with several threads
  while True:
  for _ in range(jobs):
- sums, sum_sq = queue.get()
+ sums, sum_sq, counts = queue.get()

  for k, query in enumerate(qdata):
  query.sum += sums[k]
  query.sum_sq += sum_sq[k]
+ query.n += counts[k]

- converged = check_interval(qdata, num_sims, alpha, delta, quantile, verbose)
+ converged = check_interval(qdata, num_sims, min_sim, alpha, quantile, verbose)

  if converged or max_sim and num_sims >= max_sim:
  break
@@ -288,27 +331,30 @@ def qdata_to_dict(num_sims, qdata, program):
  for k, (fname, line, column, params) in enumerate(program.query_locations):
  # For parametric queries, we return an array of values
  if params:
- mean, std, radius = [], [], []
+ mean, std, radius, count, discarded = [], [], [], [], []

  while q and q.query == k:
  mean.append(q.mu)
  std.append(q.s)
  radius.append(q.h)
+ count.append(q.n)
+ discarded.append(q.discarded)
  q = next(qdata_it, None)

  # We also write information about the parameter
  param_info = {'params': [dict(name=params[0], start=params[1], step=params[2], stop=params[3])]}

  else:
- mean, std, radius = q.mu, q.s, q.h
+ mean, std, radius, count, discarded = q.mu, q.s, q.h, q.n, q.discarded
  param_info = {}

- queries.append(dict(mean=mean, std=std, radius=radius, file=fname, line=line, column=column, **param_info))
+ queries.append(dict(mean=mean, std=std, radius=radius, file=fname, line=line, column=column,
+ nsims=count, discarded=discarded, **param_info))

  return dict(nsims=num_sims, queries=queries)


- def check(program, simulator, seed, alpha, delta, block, min_sim, max_sim, jobs, verbose=False):
+ def check(program, simulator, seed, alpha, delta, block, min_sim, max_sim, jobs, verbose=False, dump=None):
  """Run the statistical model checker"""

  # The number of simulations for the first block
@@ -324,14 +370,15 @@ def check(program, simulator, seed, alpha, delta, block, min_sim, max_sim, jobs,

  # Each query maintains some data like the sum of the outcomes
  # and the sum of their squares
- qdata = [QueryData(k, idict)
+ qdata = [QueryData(k, delta, idict)
  for k, qinfo in enumerate(program.query_locations)
  for idict in make_parameter_dicts(qinfo[3])]

  # Run the simulations
  if jobs == 1 and num_sims != 1:
- return run_single(program, qdata, num_sims, max_sim, simulator, alpha,
- delta, block, verbose=verbose)
+ with (open(dump, 'w') if dump else contextlib.nullcontext()) as dump_file:
+ return run_single(program, qdata, num_sims, min_sim, max_sim, simulator, alpha,
+ block, verbose=verbose, dump=dump_file)
  else:
- return run_parallel(program, qdata, num_sims, max_sim, simulator, alpha,
- delta, block, jobs, verbose=verbose)
+ return run_parallel(program, qdata, num_sims, min_sim, max_sim, simulator, alpha,
+ block, jobs, verbose=verbose, dump=dump)
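A hedged numeric sketch of the per-query convergence test in check_interval above: the interval radius is s · quantile(n−1, 1−α/2) / √n, and a query converges once that radius is at most its delta after at least min_sim runs (the standard normal quantile is used below as a stand-in for the package's quantile function).

import math
from statistics import NormalDist

def radius(total, total_sq, n, alpha=0.05):
    mu = total / n
    s = math.sqrt(max(total_sq - total * mu, 0.0) / (n - 1))
    tinv = NormalDist().inv_cdf(1 - alpha / 2) / math.sqrt(n)  # stand-in quantile
    return mu, s, s * tinv

mu, s, h = radius(total=61.0, total_sq=42.5, n=100)
print(h <= 0.5 and 100 >= 30)  # converged with delta=0.5 and min_sim=30, the defaults shown in api.py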
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: umaudemc
- Version: 0.15.1
+ Version: 0.16.0
  Summary: Unified Maude model-checking utility
  Author-email: ningit <ningit@users.noreply.github.com>
  License-Expression: GPL-3.0-or-later
@@ -1,10 +1,10 @@
- umaudemc/__init__.py,sha256=8WhJAZouJDJEIY8zYgYIOv2VtMy_b_0q-sscUgsm7U0,23
- umaudemc/__main__.py,sha256=x7HLEryX--lIKW1JYjF66XLZQ9lUnAQmd2ADGRipEfo,14823
- umaudemc/api.py,sha256=I-o5foy8NUlO4JT4pX9L7kkuHQG_8_GMkWlOKt708E8,19733
+ umaudemc/__init__.py,sha256=ZkVXSbnNkhhpmMRr5ur6FqBcUYuqHyK0KUV5Je_XFn8,23
+ umaudemc/__main__.py,sha256=zmgS0amNTdNZ4i2fhg860uLechFnBthvPNFBbFgUSJc,15039
+ umaudemc/api.py,sha256=naZ5edEbvx-S-NU29yAAJtqglfYnSAYVS2RJNyxJMQQ,19893
  umaudemc/backends.py,sha256=mzJkALYwcKPInT0lBiRsCxJSewKvx5j_akQsqWN1Ezo,4590
  umaudemc/common.py,sha256=UcIf7hTpP2qjcT9u_9-UcYR0nNeosx1xRZW7wsuT2bE,7305
  umaudemc/counterprint.py,sha256=vVqM_UjGRk_xeftFxBGI5m6cQXV7mf8KvbQ_fvAvSQk,9226
- umaudemc/distributed.py,sha256=2InONr9a4-n8lFVMWr57Hai3Rbuq6m4K-X4aDD1dYgE,8842
+ umaudemc/distributed.py,sha256=CljCg0VzLG7pDsDb_q1Lc95OVVjrU02cadkHRz9O8qY,9112
  umaudemc/formatter.py,sha256=nbQlIsR5Xv18OEcpJdnTDGqO9xGL_amvBGFMU2OmheU,6026
  umaudemc/formulae.py,sha256=jZPPDhjgsb7cs5rWvitiQoO0fd8JIlK98at2SN-LzVE,12156
  umaudemc/grapher.py,sha256=K1chKNNlEzQvfOsiFmRPJmd9OpxRIrg6OyiMW6gqOCU,4348
@@ -14,11 +14,11 @@ umaudemc/kleene.py,sha256=sW5SGdXpbLrjGtihPn8qgnhSH5WgltFaLVRx6GLwQU4,4697
  umaudemc/mproc.py,sha256=9X5pTb3Z3XHcdOo8ynH7I5RZQpjzm9xr4IBbEtaglUE,11766
  umaudemc/opsem.py,sha256=Xfdi9QGy-vcpmQ9ni8lBDAlKNw-fCRzYr6wnPbv6m1s,9448
  umaudemc/probabilistic.py,sha256=MNvFeEd84-OYedSnyksZB87UckPfwizVNJepCItgRy8,29306
- umaudemc/pyslang.py,sha256=zOfVGtfnOWDGghtaYLfQHq61KvbzVFmAM_0-upNhrTk,87753
- umaudemc/quatex.py,sha256=SQAbVz1csGXGqcfzFcjP89BdIpN8K2aiwP_PMLGPr1o,23239
+ umaudemc/pyslang.py,sha256=ABSXYUQO2TmDq8EZ3EpVZV8NecZ0p0gERlSvLUIVAm8,87970
+ umaudemc/quatex.py,sha256=Je5g16Tzb1t9NtHPSww0W6wUTIrHPdQCOJN-bTliOnQ,23888
  umaudemc/resources.py,sha256=qKqvgLYTJVtsQHQMXFObyCLTo6-fssQeu_mG7tvVyD0,932
- umaudemc/simulators.py,sha256=Lk50Ql7hWUasWkQSWxboeR5LYfJtpwrANjUDuxYjuZ4,13232
- umaudemc/statistical.py,sha256=buthWv4ovvxsvDs0eWgJw7lX2_9BsnLsW_PxW17RHCI,9087
+ umaudemc/simulators.py,sha256=ZGDpQjFj2Sv4GLq-NGVBMH78cFiG45KFPKfAfH1ds9w,13283
+ umaudemc/statistical.py,sha256=FY3yXvv9NRiwYOQdwLDRf4WTXG1QupGJU8KwdHQhJyo,10534
  umaudemc/terminal.py,sha256=B4GWLyW4Sdymgoavj418y4TI4MnWqNu3JS4BBoSYeTc,1037
  umaudemc/usermsgs.py,sha256=h4VPxljyKidEI8vpPcToKJA6mcLu9PtMkIh6vH3rDuA,719
  umaudemc/webui.py,sha256=XlDV87tOOdcclHp2_oerrvHwRmCZdqAR4PppqeZm47A,11072
@@ -40,8 +40,8 @@ umaudemc/command/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,
  umaudemc/command/check.py,sha256=PyaPDMw5OnPxSIZ10U4we0b5tTrjnYKAtAeQkJh2uLE,12031
  umaudemc/command/graph.py,sha256=JqGzESC2sn-LBh2sqctrij03ItzwDO808s2qkNKUle0,6112
  umaudemc/command/pcheck.py,sha256=eV4e4GcOHanP4hcIhMKd5Js22_ONac6kYj70FXun3mY,7274
- umaudemc/command/scheck.py,sha256=jiVNsLfbNDUleWl9HuNW7GTQdszd5cefZJn0_Epm9UU,4967
- umaudemc/command/sworker.py,sha256=0WzLoJBnjc5EYTuZK0fOQ5yoVhEBCH2ffm4WS8oM_yw,4383
+ umaudemc/command/scheck.py,sha256=wByVmANax4-Jw3S6MHbXevYDiVP81HIhMk1M7yZwuMs,6205
+ umaudemc/command/sworker.py,sha256=rTfGbIRvXV7cVEmlTwkKrP9tfZN0ESNJKtVLnIVCOMs,4654
  umaudemc/command/test.py,sha256=Ru21JXNF61F5N5jayjwxp8okIjOAvuZuAlV_5ltQ-GU,37088
  umaudemc/data/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  umaudemc/data/opsem.maude,sha256=geDP3_RMgtS1rRmYOybJDCXn_-dyHHxg0JxfYg1ftv0,27929
@@ -52,9 +52,9 @@ umaudemc/data/smcgraph.js,sha256=iCNQNmsuGdL_GLnqVhGDisediFtedxw3C24rxSiQwx8,667
  umaudemc/data/smcview.css,sha256=ExFqrMkSeaf8VxFrJXflyCsRW3FTwbv78q0Hoo2UVrM,3833
  umaudemc/data/smcview.js,sha256=_fHum1DRU1mhco-9-c6KqTLgiC5u_cCUf61jIK7wcIQ,14509
  umaudemc/data/templog.maude,sha256=TZ-66hVWoG6gp7gJpS6FsQn7dpBTLrr76bKo-UfHGcA,9161
- umaudemc-0.15.1.dist-info/licenses/LICENSE,sha256=MrEGL32oSWfnAZ0Bq4BZNcqnq3Mhp87Q4w6-deXfFnA,17992
- umaudemc-0.15.1.dist-info/METADATA,sha256=zePwkmspjbYQuOOw0NfnjpS3XxFa-LvX5LH_azwapys,1654
- umaudemc-0.15.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- umaudemc-0.15.1.dist-info/entry_points.txt,sha256=8rYRlLkn4orZtAoujDSeol1t_UFBrK0bfjmLTNv9B44,52
- umaudemc-0.15.1.dist-info/top_level.txt,sha256=Yo_CF78HLGBSblk3890qLcx6XZ17zHCbGcT9iG8sfMw,9
- umaudemc-0.15.1.dist-info/RECORD,,
+ umaudemc-0.16.0.dist-info/licenses/LICENSE,sha256=MrEGL32oSWfnAZ0Bq4BZNcqnq3Mhp87Q4w6-deXfFnA,17992
+ umaudemc-0.16.0.dist-info/METADATA,sha256=53-pCZwFfBsUPOyA_ocXL1TauD09WrY6NL574Sf1LbQ,1654
+ umaudemc-0.16.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ umaudemc-0.16.0.dist-info/entry_points.txt,sha256=8rYRlLkn4orZtAoujDSeol1t_UFBrK0bfjmLTNv9B44,52
+ umaudemc-0.16.0.dist-info/top_level.txt,sha256=Yo_CF78HLGBSblk3890qLcx6XZ17zHCbGcT9iG8sfMw,9
+ umaudemc-0.16.0.dist-info/RECORD,,