umaudemc-0.15.1-py3-none-any.whl → umaudemc-0.17.0-py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
umaudemc/__init__.py CHANGED
@@ -1 +1 @@
-__version__ = '0.15.1'
+__version__ = '0.17.0'
umaudemc/__main__.py CHANGED
@@ -316,6 +316,12 @@ def build_parser():
         '--distribute',
         help='Distribute the computation over some machines'
     )
+    parser_scheck.add_argument(
+        '-D',
+        metavar='name=value',
+        action='append',
+        help='Define a constant to be used in QuaTEx expressions'
+    )
     parser_scheck.add_argument(
         '--format', '-f',
         help='Output format for the simulation results',
@@ -327,6 +333,10 @@ def build_parser():
         help='Plot the results of parametric queries (using Matplotlib)',
         action='store_true'
     )
+    parser_scheck.add_argument(
+        '--dump',
+        help='Dump query evaluations into the given file',
+    )
 
     parser_scheck.set_defaults(mode='scheck')
 
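Both additions extend the scheck subcommand: -D name=value may be repeated, and argparse's action='append' collects the occurrences into a list (args.D) that is later turned into a constants dictionary, while --dump names a file where query evaluations are written. A minimal standalone sketch of the argparse behavior (not the actual umaudemc parser; option values and the file name below are made up):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('-D', metavar='name=value', action='append',
                    help='Define a constant to be used in QuaTEx expressions')
parser.add_argument('--dump', help='Dump query evaluations into the given file')

args = parser.parse_args(['-D', 'rate=0.5', '-D', 'steps=100', '--dump', 'evals.txt'])
# args.D == ['rate=0.5', 'steps=100'] and args.dump == 'evals.txt' (hypothetical values)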
umaudemc/api.py CHANGED
@@ -420,7 +420,7 @@ class MaudeModel:
         return result, stats
 
     def scheck(self, quatex, assign='uniform', alpha=0.05, delta=0.5, block=30, nsims=(30, None),
-               seed=None, jobs=1, usermsgs=_usermsgs, verbose=False):
+               seed=None, jobs=1, usermsgs=_usermsgs, verbose=False, constants=None):
         """
         Statistical model checking of a given QuaTEx expression
 
@@ -444,15 +444,17 @@ class MaudeModel:
         :type usermsgs: An object with print_error, print_warning and print_info methods
         :param verbose: Enable verbose messages about the simulation state between blocks
         :type verbose: bool
+        :param constants: Constants to be used in the QuaTEx expression
+        :type constants: dict[str, float]
         :returns: the probabilistic model-checking result and a dictionary with statistics
         """
 
         # Parse the QuaTEx query
         if isinstance(quatex, str) and os.path.exists(quatex):
             with open(quatex) as quatex_file:
-                program = _quatex.parse_quatex(quatex_file, filename=quatex)
+                program = _quatex.parse_quatex(quatex_file, filename=quatex, constants=constants)
         else:
-            program = _quatex.parse_quatex(quatex)
+            program = _quatex.parse_quatex(quatex, constants=constants)
 
         if not program:
             return None
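For API users, the new constants keyword argument is the way to inject numeric values into a QuaTEx expression from Python. A hedged usage sketch, assuming a MaudeModel instance has already been constructed (the file names, the initial term, and the constructor call below are illustrative assumptions, not taken from this diff):

from umaudemc.api import MaudeModel

# Hypothetical model file and initial term; only the scheck keyword arguments
# reflect the signature shown above.
model = MaudeModel('model.maude', 'initial')
result, stats = model.scheck('prop.quatex', alpha=0.05, delta=0.5,
                             constants={'rate': 0.5, 'steps': 100.0})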
@@ -19,13 +19,16 @@ def show_results(program, nsims, qdata):
     qdata_it = iter(qdata)
     q = next(qdata_it, None)
 
-    for k, (fname, line, column, params) in enumerate(program.query_locations):
+    for k, query in enumerate(program.queries):
         # Print the query name and location only if there are many
         if program.nqueries > 1:
-            print(f'Query {k + 1} ({fname}:{line}:{column})')
+            # If the number of simulation is lower for this query
+            sim_detail = f' ({q.n} simulations)' if q.n != nsims else ''
+
+            print(f'Query {k + 1} ({query.filename}:{query.line}:{query.column}){sim_detail}')
 
         # For parametric queries, we show the result for every value
-        var = params[0] if params else None
+        var = query.parameters[0] if query.parameters else None
 
         while q and q.query == k:
             if var:
@@ -33,6 +36,11 @@ def show_results(program, nsims, qdata):
             else:
                 print(f' μ = {q.mu:<25} σ = {q.s:<25} r = {q.h}')
 
+            # If executions have been discarded
+            if q.discarded:
+                total = q.discarded + q.n
+                print(f' where {q.discarded} executions out of {total} ({round(q.discarded / total * 100, 2)}%) have been discarded')
+
             q = next(qdata_it, None)
 
 
@@ -85,10 +93,10 @@ def plot_results(program, qdata):
         return
 
     for k, xs, ys, rs in results:
-        line, column, _ = program.query_locations[k]
+        query = program.queries[k]
 
         # Plot the mean
-        p = plt.plot(xs, ys, label=f'{line}:{column}')
+        p = plt.plot(xs, ys, label=f'{query.line}:{query.column}')
         # Plot the confidence interval
         plt.fill_between(xs, [y - r for y, r in zip(ys, rs)],
                          [y + r for y, r in zip(ys, rs)],
@@ -121,6 +129,31 @@ def parse_range(rtext):
     return lims
 
 
+def parse_defines(defines):
+    """Parse constant definitions with -D in the command line"""
+
+    constants = {}
+
+    if not defines:
+        return constants
+
+    for df in defines:
+        if df.count('=') != 1:
+            usermsgs.print_error(f'The argument of -D must be key=value, not {df}. This definition will be ignored.')
+            continue
+
+        key, value = df.strip().split('=')
+
+        try:
+            constants[key] = float(value)
+
+        except ValueError:
+            usermsgs.print_error(f'The value of a -D assignment must be a number, not {value}. This definition will be ignored.')
+            continue
+
+    return constants
+
+
 def scheck(args):
     """Statistical check subcommand"""
 
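The behavior of the new parse_defines helper can be summarized with a standalone sketch (the usermsgs.print_error reporting of the real function is reduced to comments here): well-formed name=value pairs become float-valued entries, and malformed or non-numeric definitions are skipped.

def parse_defines_sketch(defines):
    """Illustration of the -D parsing above; invalid entries are simply skipped."""
    constants = {}
    for df in defines or []:
        if df.count('=') != 1:
            continue  # the real code warns via usermsgs.print_error
        key, value = df.strip().split('=')
        try:
            constants[key] = float(value)
        except ValueError:
            continue  # the real code warns via usermsgs.print_error
    return constants

# parse_defines_sketch(['rate=0.5', 'steps=100', 'broken']) == {'rate': 0.5, 'steps': 100.0}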
@@ -135,8 +168,13 @@ def scheck(args):
         usermsgs.print_error(f'The query file "{args.query}" does not exist.')
         return 1
 
+    # Parse the constants definitions in the command line
+    constants = parse_defines(args.D)
+
     with open(args.query) as quatex_file:
-        program, seen_files = parse_quatex(quatex_file, filename=args.query, legacy=args.assign == 'pmaude')
+        program, seen_files = parse_quatex(quatex_file, filename=args.query,
+                                           legacy=args.assign == 'pmaude',
+                                           constants=constants)
 
     if not program:
         return 1
@@ -167,7 +205,7 @@ def scheck(args):
     if args.distribute:
         from ..distributed import distributed_check
 
-        num_sims, qdata = distributed_check(args, data, min_sim, max_sim, program, seen_files)
+        num_sims, qdata = distributed_check(args, data, min_sim, max_sim, program, constants, seen_files)
 
         if num_sims is None:
             return 1
@@ -182,7 +220,12 @@ def scheck(args):
     # Call the statistical model checker
     num_sims, qdata = check(program, simulator,
                             args.seed, args.alpha, args.delta, args.block,
-                            min_sim, max_sim, args.jobs, args.verbose)
+                            min_sim, max_sim, args.jobs, args.verbose, args.dump)
+
+    # Check the discarded count when there is no convergence
+    for query in qdata:
+        if not query.converged:
+            query.discarded = num_sims - query.n
 
     # Print the results on the terminal
     (show_json if args.format == 'json' else show_results)(program, num_sims, qdata)
@@ -71,7 +71,8 @@ class Worker:
 
         with open(os.path.join(tmp_dir, args.query)) as quatex_file:
             self.program, _ = parse_quatex(quatex_file, filename=args.query,
-                                           legacy=args.assign == 'pmaude')
+                                           legacy=args.assign == 'pmaude',
+                                           constants=args.constants)
 
         if not self.program:
             return False
@@ -92,12 +93,15 @@ class Worker:
         block = self.block
 
         # Query data
-        qdata = [QueryData(k, idict)
-                 for k, qinfo in enumerate(program.query_locations)
-                 for idict in make_parameter_dicts(qinfo[3])]
+        # (delta, its second argument, does not matter because
+        # convergence is not evaluated by the worker)
+        qdata = [QueryData(k, 1.0, idict)
+                 for k, qinfo in enumerate(program.queries)
+                 for idict, _ in make_parameter_dicts(qinfo, 1.0)]
 
         sums = array('d', [0.0] * len(qdata))
         sum_sq = array('d', [0.0] * len(qdata))
+        counts = array('i', [0] * len(qdata))
 
         while True:
 
@@ -105,11 +109,13 @@ class Worker:
             # Run the simulation and compute all queries at once
             values = run(program, qdata, simulator)
 
-            for k in range(len(qdata)):
-                sums[k] += values[k]
-                sum_sq[k] += values[k] * values[k]
+            for k, value in enumerate(values):
+                if value is not None:
+                    sums[k] += value
+                    sum_sq[k] += value * value
+                    counts[k] += 1
 
-            conn.send(b'b' + sums.tobytes() + sum_sq.tobytes())
+            conn.send(b'b' + sums.tobytes() + sum_sq.tobytes() + counts.tobytes())
 
             # Check whether to continue
             answer = conn.recv(1)
@@ -125,6 +131,7 @@ class Worker:
             for k in range(len(qdata)):
                 sums[k] = 0
                 sum_sq[k] = 0
+                counts[k] = 0
 
 
 def handle_request(message, conn, addr, keep_file):
umaudemc/distributed.py CHANGED
@@ -9,6 +9,7 @@ import random
 import re
 import selectors
 import socket
+import sys
 import tarfile
 from array import array
 from contextlib import ExitStack
@@ -174,7 +175,7 @@ def process_dspec(dspec, fname):
     return True
 
 
-def setup_workers(args, initial_data, dspec, seen_files, stack):
+def setup_workers(args, initial_data, dspec, constants, seen_files, stack):
     """Setup workers and send problem data"""
 
     workers = dspec['workers']
@@ -188,7 +189,7 @@ def setup_workers(args, initial_data, dspec, seen_files, stack):
     COPY = ('initial', 'strategy', 'module', 'metamodule', 'opaque', 'full_matchrew',
             'purge_fails', 'merge_states', 'assign', 'block', 'query', 'assign', 'advise', 'verbose')
 
-    data = {key: args.__dict__[key] for key in COPY} | {'file': 'source.maude'}
+    data = {key: args.__dict__[key] for key in COPY} | {'file': 'source.maude', 'constants': constants}
 
     # Make a flattened version of the Maude file
     flat_source = io.BytesIO()
@@ -200,6 +201,10 @@ def setup_workers(args, initial_data, dspec, seen_files, stack):
     # Save the sockets for each worker
     sockets = []
 
+    # Root of the QuaTEx sources
+    quatex_root = os.path.commonpath([os.path.dirname(fn) for fn in seen_files])
+    data['query'] = os.path.relpath(data['query'], start=quatex_root)
+
     for worker, seed in zip(workers, seeds):
         address, port = worker['address'], worker['port']
 
@@ -229,12 +234,8 @@ def setup_workers(args, initial_data, dspec, seen_files, stack):
             tarf.addfile(flat_info, flat_source)
 
             for file in seen_files:
-                relpath = os.path.relpath(file)
-
-                if relpath.startswith('..'):
-                    usermsgs.print_error('QuaTEx file outside the working tree, it will not be included and the execution will fail.')
-                else:
-                    tarf.add(relpath)
+                relpath = os.path.relpath(file, start=quatex_root)
+                tarf.add(file, arcname=relpath)
 
         fobj.flush()
 
@@ -248,7 +249,7 @@ def setup_workers(args, initial_data, dspec, seen_files, stack):
     return sockets
 
 
-def distributed_check(args, initial_data, min_sim, max_sim, program, seen_files):
+def distributed_check(args, initial_data, min_sim, max_sim, program, constants, seen_files):
     """Distributed statistical model checking"""
 
     # Load the distribution specification
@@ -260,10 +261,10 @@ def distributed_check(args, initial_data, min_sim, max_sim, program, seen_files)
     with ExitStack() as stack:
 
         # Socket to connect with the workers
-        if not (sockets := setup_workers(args, initial_data, dspec, seen_files, stack)):
+        if not (sockets := setup_workers(args, initial_data, dspec, constants, seen_files, stack)):
             return None, None
 
-        print('All workers are ready. Starting...')
+        print('All workers are ready. Starting...', file=sys.stderr)
 
         # Use a selector to wait for updates from any worker
         selector = selectors.DefaultSelector()
            sock.send(b'c')
@@ -273,11 +274,12 @@ def distributed_check(args, initial_data, min_sim, max_sim, program, seen_files)
            sock.send(b'c')
 
        buffer = array('d')
+       ibuffer = array('i')
 
        # Query data
-       qdata = [QueryData(k, idict)
-                for k, qinfo in enumerate(program.query_locations)
-                for idict in make_parameter_dicts(qinfo[3])]
+       qdata = [QueryData(k, delta, idict)
+                for k, qinfo in enumerate(program.queries)
+                for idict, delta in make_parameter_dicts(qinfo, args.delta)]
        nqueries = len(qdata)
        num_sims = 0
 
@@ -293,16 +295,19 @@ def distributed_check(args, initial_data, min_sim, max_sim, program, seen_files)
                answer = sock.recv(1)
 
                if answer == b'b':
-                   data = sock.recv(16 * nqueries)
-                   buffer.frombytes(data)
+                   data = sock.recv(24 * nqueries)
+                   buffer.frombytes(data[:16 * nqueries])
+                   ibuffer.frombytes(data[16 * nqueries:])
 
                    for k in range(nqueries):
                        qdata[k].sum += buffer[k]
                        qdata[k].sum_sq += buffer[nqueries + k]
+                       qdata[k].n += ibuffer[k]
 
                    num_sims += key.data['block']
 
                    del buffer[:]
+                   del ibuffer[:]
                    finished.append(key.fileobj)
 
                else:
@@ -311,7 +316,7 @@ def distributed_check(args, initial_data, min_sim, max_sim, program, seen_files)
                    sockets.remove(key.fileobj)
 
            # Check whether the simulation has converged
-           converged = check_interval(qdata, num_sims, args.alpha, args.delta, quantile, args.verbose)
+           converged = check_interval(qdata, num_sims, min_sim, args.alpha, quantile, args.verbose)
 
            if converged or max_sim and num_sims >= max_sim:
                break
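Alongside the running sums and sums of squares (doubles), each block report from a worker now also carries the number of simulations actually counted per query (ints), which the coordinator accumulates into qdata[k].n. A standalone sketch of the packing and unpacking implied by the changes above (the query values are made up):

from array import array

nqueries = 2

# Worker side: accumulated per-query statistics (made-up values)
sums = array('d', [1.5, 2.0])
sum_sq = array('d', [2.25, 4.0])
counts = array('i', [3, 4])
message = b'b' + sums.tobytes() + sum_sq.tobytes() + counts.tobytes()

# Coordinator side: split the payload into its double and int sections
payload = message[1:]
buffer, ibuffer = array('d'), array('i')
buffer.frombytes(payload[:16 * nqueries])   # sums followed by sums of squares
ibuffer.frombytes(payload[16 * nqueries:])  # per-query simulation counts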
umaudemc/pyslang.py CHANGED
@@ -47,6 +47,11 @@ class Instruction:
     def __repr__(self):
         return f'Instruction({self.NAMES[self.type]}, {self.extra})'
 
+    @property
+    def does_rewrite(self):
+        """Whether the instruction rewrites the subject term"""
+        return self.type in (self.RLAPP, self.RWCNEXT) and self.extra[0]
+
 
 class StratProgram:
     """Compiled strategy program"""
@@ -506,7 +511,7 @@ class StratCompiler:
 
         self.generate(next(s.arguments()), p, False)
 
-        initial_jump.extra = (p.pc + 1, initial_pc)
+        initial_jump.extra = (p.pc + (2 if self.use_kleene else 1), initial_pc)
         p.append(Instruction.JUMP, (p.pc + 1, initial_pc))
 
         if self.use_kleene:
@@ -658,12 +663,20 @@ class StratCompiler:
     def norm(self, s, p, tail):
         # A variation of the conditional and the iteration
         initial_pc = p.pc
+
+        if self.use_kleene:
+            p.append(Instruction.KLEENE, (initial_pc, True))
+
         subsearch = p.append(Instruction.SUBSEARCH)
         self.generate(next(s.arguments()), p, False)
         p.append(Instruction.NOFAIL)
         p.append(Instruction.JUMP, (initial_pc, ))
+
         subsearch.extra = p.pc
 
+        if self.use_kleene:
+            p.append(Instruction.KLEENE, (initial_pc, True))
+
     def call(self, s, p, tail):
         # Strategy calls are executed by CALL instructions, but the body of
         # their definitions should be generated too. This is done afterwards
@@ -869,9 +882,6 @@ class StratCompiler:
            def __repr__(self):
                return f'BasicBlock({self.start}, {self.length}, {self.has_rewrite}, {self.next}, {self.reachable})'
 
-       def does_rewrite(inst):
-           return inst.type == Instruction.RLAPP or inst.type == Instruction.RWCNEXT and inst.extra[0]
-
 
        # (1) Abstract the code as a graph of blocks linked by jumps
 
@@ -891,7 +901,7 @@ class StratCompiler:
            inst = p[k]
 
            # This block contains a rewrite
-           if does_rewrite(inst):
+           if inst.does_rewrite:
                current.has_rewrite = True
 
            # JUMP, CHOICE, SUBSEARCH, and CALL instructions close a block
@@ -1099,7 +1109,7 @@ class StratCompiler:
                # notify_pending is not possible within a rewriting condition
 
                # There is a RLAPP, so we need to issue a NOTIFY for the previous one
-               if does_rewrite(inst):
+               if inst.does_rewrite:
                    notify_points.append(k)
 
                # Failures discard the notification
@@ -1116,7 +1126,7 @@ class StratCompiler:
                    notify_points.append(k)
                    notify_pending = False
 
-               elif rwc_level == 0 and does_rewrite(inst):
+               elif rwc_level == 0 and inst.does_rewrite:
                    notify_pending = True
 
                for sc in block.next:
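The does_rewrite property introduced at the top of this file's changes replaces the removed local does_rewrite(inst) helper, so the call sites above now read inst.does_rewrite. A minimal mock, with made-up opcode values, just to illustrate the call-site change (not the real Instruction class):

class MockInstruction:
    """Stand-in for pyslang.Instruction, only to show the property-based check."""
    RLAPP, RWCNEXT = 0, 1  # made-up opcode values

    def __init__(self, type_, extra):
        self.type, self.extra = type_, extra

    @property
    def does_rewrite(self):
        return self.type in (self.RLAPP, self.RWCNEXT) and self.extra[0]

assert MockInstruction(MockInstruction.RLAPP, (True,)).does_rewrite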