restage-0.2.4-py3-none-any.whl → restage-0.3.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
restage/cache.py CHANGED
@@ -2,6 +2,7 @@ from __future__ import annotations
 
 from mccode_antlr.instr import Instr
 from .tables import InstrEntry, SimulationTableEntry, SimulationEntry
+from mccode_antlr.compiler.c import CBinaryTarget
 
 
 def setup_database(named: str):
@@ -37,40 +38,41 @@ def directory_under_module_data_path(sub: str, prefix=None, suffix=None, name=None
     return Path(mkdtemp(dir=under, prefix=prefix or '', suffix=suffix or ''))
 
 
-def _compile_instr(entry: InstrEntry, instr: Instr, config: dict | None = None, target=None, generator=None):
+def _compile_instr(entry: InstrEntry, instr: Instr, config: dict | None = None,
+                   mpi: bool = False, acc: bool = False,
+                   target=None, generator=None):
+    from tempfile import mkdtemp
     from mccode_antlr import __version__
     from mccode_antlr.compiler.c import compile_instrument, CBinaryTarget
     if config is None:
         config = dict(default_main=True, enable_trace=False, portable=False, include_runtime=True,
                       embed_instrument_file=False, verbose=False)
     if target is None:
-        target = CBinaryTarget(mpi=False, acc=False, count=1, nexus=False)
+        target = CBinaryTarget(mpi=mpi or False, acc=acc or False, count=1, nexus=False)
     if generator is None:
         from mccode_antlr.translators.target import MCSTAS_GENERATOR
         generator = MCSTAS_GENERATOR
 
-    output = module_data_path('bin').joinpath(entry.id)
-    if not output.exists():
-        output.mkdir(parents=True)
-
+    output = directory_under_module_data_path('bin')
     binary_path = compile_instrument(instr, target, output, generator=generator, config=config)
     entry.mccode_version = __version__
    entry.binary_path = str(binary_path)
     return entry
 
 
-def cache_instr(instr: Instr, mccode_version=None, binary_path=None, **kwargs) -> InstrEntry:
+def cache_instr(instr: Instr, mpi: bool = False, acc: bool = False, mccode_version=None, binary_path=None, **kwargs) -> InstrEntry:
     instr_contents = str(instr)
-    query = DATABASE.query_instr_file(search={'file_contents': instr_contents})  # returns a list[InstrTableEntry]
+    # the query returns a list[InstrTableEntry]
+    query = DATABASE.query_instr_file(search={'file_contents': instr_contents, 'mpi': mpi, 'acc': acc})
     if len(query) > 1:
         raise RuntimeError(f"Multiple entries for {instr_contents} in {DATABASE.instr_file_table}")
     elif len(query) == 1:
         return query[0]
 
-    instr_file_entry = InstrEntry(file_contents=instr_contents, binary_path=binary_path or '',
+    instr_file_entry = InstrEntry(file_contents=instr_contents, mpi=mpi, acc=acc, binary_path=binary_path or '',
                                   mccode_version=mccode_version or 'NONE')
     if binary_path is None:
-        instr_file_entry = _compile_instr(instr_file_entry, instr, **kwargs)
+        instr_file_entry = _compile_instr(instr_file_entry, instr, mpi=mpi, acc=acc, **kwargs)
 
     DATABASE.insert_instr_file(instr_file_entry)
     return instr_file_entry
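The practical effect of threading `mpi` and `acc` through `cache_instr` is that the cache key widens from the instrument source alone to the triple (source, mpi, acc), so serial and MPI builds of identical `.instr` contents no longer collide on one cached binary. A minimal standalone sketch of that key change, using a plain dict in place of restage's SQLite-backed `DATABASE`:

```python
# Sketch only: models the widened lookup key with a dict,
# not restage's actual SQLite-backed DATABASE.
cache: dict[tuple[str, bool, bool], str] = {}

def lookup(contents: str, mpi: bool = False, acc: bool = False) -> str | None:
    return cache.get((contents, mpi, acc))

source = 'DEFINE INSTRUMENT demo() ...'               # stand-in instrument contents
cache[(source, False, False)] = '/cache/bin/serial/demo'
cache[(source, True, False)] = '/cache/bin/mpi/demo'

assert lookup(source) == '/cache/bin/serial/demo'     # serial build
assert lookup(source, mpi=True) == '/cache/bin/mpi/demo'  # distinct MPI build
```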
restage/mcpl.py CHANGED
@@ -4,11 +4,11 @@ from pathlib import Path
 def mcpl_real_filename(filename: Path) -> Path:
     """MCPL_output from McCode instruments has the bad habit of changing the output file name silently.
     Find the _real_ output file name by looking for the expected variants"""
-    if filename.exists():
+    if filename.exists() and filename.is_file():
         return filename
-    if filename.with_suffix('.mcpl').exists():
+    if filename.with_suffix('.mcpl').exists() and filename.with_suffix('.mcpl').is_file():
         return filename.with_suffix('.mcpl')
-    if filename.with_suffix('.mcpl.gz').exists():
+    if filename.with_suffix('.mcpl.gz').exists() and filename.with_suffix('.mcpl.gz').is_file():
         return filename.with_suffix('.mcpl.gz')
     raise FileNotFoundError(f'Could not find MCPL file {filename}')
 
@@ -37,7 +37,7 @@ def mcpl_particle_count(filename):
     return int(m.group(1))
 
 
-def mcpl_merge_files(files: list[Path], filename: str, keep_originals: bool = False):
+def mcpl_merge_files(files: list[Path], filepath: Path, keep_originals: bool = False):
     """Merge a list of MCPL files into a single file using mcpltool.
 
     :param files: The list of files to merge.
@@ -51,6 +51,14 @@ def mcpl_merge_files(files: list[Path], filename: str, keep_originals: bool = False
     """
     from subprocess import run
     real_filenames = [mcpl_real_filename(f) for f in files]
+    # if the real filenames have .mcpl or .mcpl.gz, the merged filename should too
+    ext = ''
+    if real_filenames[0].name.endswith('.mcpl.gz'):
+        ext = '.mcpl.gz'
+    elif real_filenames[0].name.endswith('.mcpl'):
+        ext = '.mcpl'
+    filename = filepath.with_suffix(ext).as_posix()
+
     command = ['mcpltool', '--merge', filename] + [str(f) for f in real_filenames]
     result = run(command)
     if result.returncode != 0:
@@ -58,3 +66,21 @@ def mcpl_merge_files(files: list[Path], filename: str, keep_originals: bool = False
     if not keep_originals:
         for file in real_filenames:
             file.unlink()
+
+
+def mcpl_rename_file(source: Path, dest: Path, strict: bool = False):
+    filepath = mcpl_real_filename(source)
+    filename = filepath.name  # this could be '{name}', '{name}.mcpl', or '{name}.mcpl.gz'
+    ext = ''
+    if filepath.name.endswith('.mcpl.gz'):
+        ext = '.mcpl.gz'
+    elif filepath.name.endswith('.mcpl'):
+        ext = '.mcpl'
+
+    if not dest.name.endswith(ext):
+        if strict:
+            raise RuntimeError(f"Destination {dest} does not have extension matching {source}")
+        dest = dest.with_suffix(ext)
+
+    filepath.rename(dest)
+    return dest
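The new `mcpl_rename_file` preserves whatever extension MCPL actually produced: in non-strict mode a bare destination name silently gains the source's `.mcpl`/`.mcpl.gz` suffix, while `strict=True` raises instead. A quick usage sketch with stand-in files (assumes restage 0.3.1 is importable; a real MCPL output would not be an empty file, but the rename logic only inspects names):

```python
from pathlib import Path
from tempfile import TemporaryDirectory

from restage.mcpl import mcpl_rename_file  # added in 0.3.1

with TemporaryDirectory() as tmp:
    src = Path(tmp) / 'part_0.mcpl.gz'
    src.touch()                              # stand-in for a real MCPL output
    dest = mcpl_rename_file(src, Path(tmp) / 'run_1')
    # non-strict mode appended the source's extension to the bare name
    assert dest.name == 'run_1.mcpl.gz' and dest.exists()
```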
restage/splitrun.py CHANGED
@@ -4,6 +4,7 @@ from typing import Union
 from pathlib import Path
 from .range import Singular, MRange
 from .tables import SimulationEntry, InstrEntry
+from mccode_antlr.compiler.c import CBinaryTarget
 
 
 def make_splitrun_parser():
@@ -30,6 +31,12 @@ def make_splitrun_parser():
        help='Maximum number of particles to simulate during first instrument simulations')
     aa('--dryrun', action='store_true', default=False,
        help='Do not run any simulations, just print the commands')
+    aa('--parallel', action='store_true', default=False,
+       help='Use MPI multi-process parallelism (primary instrument only at the moment)')
+    aa('--gpu', action='store_true', default=False,
+       help='Use GPU OpenACC parallelism (primary instrument only at the moment)')
+    aa('--process-count', nargs=1, type=int, default=0,
+       help='MPI process count, 0 == System Default')
     # splitrun controlling parameters
     aa('--split-at', nargs=1, type=str, default=['mcpl_split'],
        help='Component at which to split -- DEFAULT: mcpl_split')
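One detail worth noting: `--process-count` is declared with `nargs=1`, so a supplied value arrives as a one-element list while the default remains the bare integer 0. A standalone sketch mirroring just the three `aa(...)` calls above (not the package's full parser, which also takes the instrument and its scan parameters):

```python
import argparse

# Mirrors only the three options added above, for illustration.
parser = argparse.ArgumentParser()
parser.add_argument('--parallel', action='store_true', default=False)
parser.add_argument('--gpu', action='store_true', default=False)
parser.add_argument('--process-count', nargs=1, type=int, default=0)

args = parser.parse_args(['--parallel', '--process-count', '4'])
assert args.parallel and not args.gpu
assert args.process_count == [4]                 # nargs=1 wraps the value in a list
assert parser.parse_args([]).process_count == 0  # ...but the default stays bare
```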
@@ -153,7 +160,10 @@ def splitrun_from_file(args, parameters, precision):
              format=args.format[0] if args.format is not None else None,
              minimum_particle_count=args.nmin[0] if args.nmin is not None else None,
              maximum_particle_count=args.nmax[0] if args.nmax is not None else None,
-             dry_run=args.dryrun
+             dry_run=args.dryrun,
+             parallel=args.parallel,
+             gpu=args.gpu,
+             process_count=args.process_count,
              )
 
 
@@ -161,6 +171,7 @@ def splitrun(instr, parameters, precision: dict[str, float], split_at=None, grid=None,
              minimum_particle_count=None,
              maximum_particle_count=None,
              dry_run=False,
+             parallel=False, gpu=False, process_count=0,
              callback=None, callback_arguments: dict[str, str] | None = None,
              output_split_instrs=True,
              **runtime_arguments):
@@ -190,7 +201,7 @@
     pre_entry = splitrun_pre(pre, pre_parameters, grid, precision, **runtime_arguments,
                              minimum_particle_count=minimum_particle_count,
                              maximum_particle_count=maximum_particle_count,
-                             dry_run=dry_run)
+                             dry_run=dry_run, parallel=parallel, gpu=gpu, process_count=process_count)
 
     splitrun_combined(pre_entry, pre, post, pre_parameters, post_parameters, grid, precision,
                       dry_run=dry_run, callback=callback, callback_arguments=callback_arguments, **runtime_arguments)
@@ -198,16 +209,16 @@
 
 def splitrun_pre(instr, parameters, grid, precision: dict[str, float],
                  minimum_particle_count=None, maximum_particle_count=None, dry_run=False,
+                 parallel=False, gpu=False, process_count=0,
                  **runtime_arguments):
 
     from functools import partial
     from .cache import cache_instr
     from .energy import energy_to_chopper_translator
     from .range import parameters_to_scan
-
     # check if this instr is already represented in the module's cache database
     # if not, it is compiled and added to the cache with (hopefully sensible) defaults specified
-    entry = cache_instr(instr)
+    entry = cache_instr(instr, mpi=parallel, acc=gpu)
     # get the function with converts energy parameters to chopper parameters:
     translate = energy_to_chopper_translator(instr.name)
     # determine the scan in the user-defined parameters!
@@ -216,7 +227,7 @@ def splitrun_pre(instr, parameters, grid, precision: dict[str, float],
     sit_kw = {'seed': args.get('seed'), 'ncount': args.get('ncount'), 'gravitation': args.get('gravitation', False)}
 
     step = partial(_pre_step, instr, entry, names, precision, translate, sit_kw, minimum_particle_count,
-                   maximum_particle_count, dry_run)
+                   maximum_particle_count, dry_run, process_count)
 
     # this does not work due to the sqlite database being locked by the parallel processes
     # from joblib import Parallel, delayed
@@ -230,7 +241,7 @@
     return entry
 
 
-def _pre_step(instr, entry, names, precision, translate, kw, min_pc, max_pc, dry_run, values):
+def _pre_step(instr, entry, names, precision, translate, kw, min_pc, max_pc, dry_run, process_count, values):
     """The per-step function for the primary instrument simulation. Broken out for parallelization"""
     from .instr import collect_parameter_dict
     from .cache import cache_has_simulation, cache_simulation, cache_get_simulation
@@ -240,7 +251,8 @@ def _pre_step(instr, entry, names, precision, translate, kw, min_pc, max_pc, dry_run, values):
     sim.output_path = do_primary_simulation(sim, entry, nv, kw,
                                             minimum_particle_count=min_pc,
                                             maximum_particle_count=max_pc,
-                                            dry_run=dry_run)
+                                            dry_run=dry_run,
+                                            process_count=process_count)
     cache_simulation(entry, sim)
     return cache_get_simulation(entry, sim)
 
@@ -255,7 +267,7 @@ def splitrun_combined(pre_entry, pre, post, pre_parameters, post_parameters, grid
     from .instr import collect_parameter_dict
     from .tables import best_simulation_entry_match
     from .emulate import mccode_sim_io, mccode_dat_io, mccode_dat_line
-    instr_entry = cache_instr(post)
+    instr_entry = cache_instr(post, mpi=False, acc=False)
     args = regular_mccode_runtime_dict(runtime_arguments)
     sit_kw = {'seed': args.get('seed'), 'ncount': args.get('ncount'), 'gravitation': args.get('gravitation', False)}
     # recombine the parameters to ensure the 'correct' scan is performed
@@ -341,7 +353,8 @@ def do_primary_simulation(sit: SimulationEntry,
                           args: dict,
                           minimum_particle_count: int | None = None,
                           maximum_particle_count: int | None = None,
-                          dry_run: bool = False
+                          dry_run: bool = False,
+                          process_count: int = 0,
                           ):
     from zenlog import log
     from pathlib import Path
@@ -349,10 +362,11 @@
     from mccode_antlr.compiler.c import run_compiled_instrument, CBinaryTarget
     from .cache import directory_under_module_data_path
     # create a directory for this simulation based on the uuid generated for the simulation entry
-    work_dir = directory_under_module_data_path('sim', prefix=f'p_{instr_file_entry.id}_')
+    work_dir = directory_under_module_data_path('sim', prefix=f'{Path(instr_file_entry.binary_path).parent.stem}_')
 
     binary_at = Path(instr_file_entry.binary_path)
-    target = CBinaryTarget(mpi=False, acc=False, count=1, nexus=False)
+    # process_count == 0 --> system MPI default process count (# physical cores, typically)
+    target = CBinaryTarget(mpi=instr_file_entry.mpi, acc=instr_file_entry.acc, count=process_count, nexus=False)
 
     # ensure the primary spectrometer uses our output directory
     args_dict = {k: v for k, v in args.items() if k != 'dir'}
@@ -368,6 +382,13 @@
         log.info('Expected mcpl_filename parameter in primary simulation, using default')
         mcpl_filename = f'{sit.id}.mcpl'
         sit.parameter_values['mcpl_filename'] = Value.str(mcpl_filename)
+
+    # strip the extension from the filename passed into the runner:
+    if mcpl_filename.endswith('.gz'):
+        mcpl_filename = mcpl_filename[:-3]
+    if mcpl_filename.endswith('.mcpl'):
+        mcpl_filename = mcpl_filename[:-5]
+
     mcpl_filepath = work_dir.joinpath(mcpl_filename)
     runner = partial(run_compiled_instrument, binary_at, target, capture=False, dry_run=dry_run)
     if dry_run or args.get('ncount') is None:
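The two `endswith` checks above run in sequence, so both gzipped and plain names reduce to the same bare stem before being handed to the runner. A tiny self-contained check of that ordering:

```python
# Both checks apply in order: '.mcpl.gz' loses '.gz' first, then '.mcpl'.
for name in ('run.mcpl.gz', 'run.mcpl', 'run'):
    stripped = name
    if stripped.endswith('.gz'):
        stripped = stripped[:-3]
    if stripped.endswith('.mcpl'):
        stripped = stripped[:-5]
    assert stripped == 'run'
```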
@@ -392,7 +413,7 @@ def repeat_simulation_until(count, runner, args: dict, parameters, work_dir: Path
     from functools import partial
     from zenlog import log
     from .emulate import combine_mccode_dats_in_directories, combine_mccode_sims_in_directories
-    from .mcpl import mcpl_particle_count, mcpl_merge_files
+    from .mcpl import mcpl_particle_count, mcpl_merge_files, mcpl_rename_file
     goal, latest_result, one_trillion = count, -1, 1_000_000_000_000
     # avoid looping for too long by limiting the minimum number of particles to simulate
     minimum_particle_count = _clamp(1, one_trillion, minimum_particle_count or count)
@@ -420,21 +441,23 @@
             args['seed'] = random.randint(1, 2 ** 32 - 1)
 
         outputs.append(work_dir.joinpath(f'{len(files)}'))
-        files.append(work_dir.joinpath(f'part_{len(files)}.mcpl'))
+        files.append(work_dir.joinpath(f'part_{len(files)}'))  # appending the extension here breaks MCPL+MPI?
         args['dir'] = outputs[-1]
         # adjust our guess for how many particles to simulate : how many we need divided by the last transmission
         args['ncount'] = clamp(((goal - sum(counts)) * args['ncount']) // counts[-1] if len(counts) else goal)
-        runner(_args_pars_mcpl(args, parameters, files[-1]))
-        counts.append(mcpl_particle_count(files[-1]))
+        # recycle the intended-output mcpl filename to avoid breaking mcpl file-merging
+        runner(_args_pars_mcpl(args, parameters, mcpl_filepath))
+        counts.append(mcpl_particle_count(mcpl_filepath))
+        # rename the outputfile to this run's filename
+        files[-1] = mcpl_rename_file(mcpl_filepath, files[-1])
 
     # now we need to concatenate the mcpl files, and combine output (.dat and .sim) files
-    mcpl_merge_files(files, str(mcpl_filepath))
+    mcpl_merge_files(files, mcpl_filepath)
     combine_mccode_dats_in_directories(outputs, work_dir)
     combine_mccode_sims_in_directories(outputs, work_dir)
 
 
-def do_secondary_simulation(p_sit: SimulationEntry, entry: InstrEntry, pars: dict, args: dict,
-                            dry_run: bool = False):
+def do_secondary_simulation(p_sit: SimulationEntry, entry: InstrEntry, pars: dict, args: dict, dry_run: bool = False):
     from zenlog import log
     from pathlib import Path
     from shutil import copy
@@ -452,7 +475,7 @@
 
     mcpl_path = mcpl_real_filename(Path(p_sit.output_path).joinpath(mcpl_filename))
     executable = Path(entry.binary_path)
-    target = CBinaryTarget(mpi=False, acc=False, count=1, nexus=False)
+    target = CBinaryTarget(mpi=entry.mpi, acc=entry.acc, count=1 if not entry.mpi else 0, nexus=False)
     run_compiled_instrument(executable, target, _args_pars_mcpl(args, pars, mcpl_path), capture=False, dry_run=dry_run)
 
     if not dry_run:
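The reworked loop in `repeat_simulation_until` now always writes to the single `mcpl_filepath`, counts the particles that survived, renames the result to this run's `part_N` name, and rescales the next request by the observed transmission. A stubbed-out sketch of just that bookkeeping (the real loop shells out to the compiled instrument and additionally clamps against `minimum_particle_count`/`maximum_particle_count`; the `max(1, ...)` guards here stand in for that clamping):

```python
import random

def repeat_until(goal: int, run_once) -> list[int]:
    """Stubbed bookkeeping: run_once(ncount) returns the particles produced."""
    counts: list[int] = []
    ncount = goal                      # first guess: ask for the full goal
    while sum(counts) < goal:
        counts.append(max(1, run_once(ncount)))
        # rescale by the last run's transmission, as in the diff above
        ncount = max(1, ((goal - sum(counts)) * ncount) // counts[-1])
    return counts

random.seed(7)
# each stub run transmits 20-40% of the requested particles
print(repeat_until(10_000, lambda n: int(n * random.uniform(0.2, 0.4))))
```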
restage/tables.py CHANGED
@@ -316,6 +316,8 @@ class NexusStructureEntry:
 class InstrEntry:
     """A class to represent the instrument file and its compiled binary when stored as an entry in a table"""
     file_contents: str
+    mpi: bool
+    acc: bool
     binary_path: str
     mccode_version: str = field(default_factory=str)
     id: str = field(default_factory=uuid)
@@ -324,8 +326,8 @@
 
     @classmethod
     def from_query_result(cls, values):
-        fid, file_contents, binary_path, mccode_version, creation, last_access = values
-        return cls(file_contents, binary_path, mccode_version, fid, creation, last_access)
+        fid, file_contents, mpi, acc, binary_path, mccode_version, creation, last_access = values
+        return cls(file_contents, mpi != 0, acc != 0, binary_path, mccode_version, fid, creation, last_access)
 
     def __post_init__(self):
         if len(self.mccode_version) == 0:
@@ -334,12 +336,14 @@
 
     @staticmethod
     def columns():
-        return ['id', 'file_contents', 'binary_path', 'mccode_version', 'creation', 'last_access']
+        return ['id', 'file_contents', 'mpi', 'acc', 'binary_path', 'mccode_version', 'creation', 'last_access']
 
     def values(self):
         str_values = [f"'{x}'" for x in (self.id, self.file_contents, self.binary_path, self.mccode_version)]
+        int_values = [f'{x}' for x in (self.mpi, self.acc)]
         flt_values = [f'{self.creation}', f'{self.last_access}']
-        return str_values + flt_values
+        # matches id, file_contents, mpi, acc, binary_path, mccode_version, creation, last_access order
+        return str_values[:2] + int_values + str_values[2:] + flt_values
 
     @classmethod
     def create_sql_table(cls, table_name: str = 'instr_files'):
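Because SQLite has no native boolean type, the two new columns round-trip as integers: `from_query_result` converts them back with `mpi != 0` / `acc != 0`, and `values()` re-serializes everything in the `columns()` order. A round-trip sketch (assumes restage 0.3.1 is importable; the row values themselves are made up):

```python
from restage.tables import InstrEntry

# A hypothetical stored row, in the new eight-column order.
row = ('some-uuid', 'DEFINE INSTRUMENT demo() ...', 1, 0,
       '/cache/bin/some-uuid/demo', '0.7.0', 1.7e9, 1.7e9)
entry = InstrEntry.from_query_result(row)

assert entry.mpi is True and entry.acc is False    # integers became booleans
assert len(entry.values()) == len(InstrEntry.columns()) == 8
```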
restage-0.2.4.dist-info/METADATA → restage-0.3.1.dist-info/METADATA RENAMED
@@ -1,13 +1,13 @@
 Metadata-Version: 2.1
 Name: restage
-Version: 0.2.4
+Version: 0.3.1
 Author-email: Gregory Tucker <gregory.tucker@ess.eu>
 Classifier: License :: OSI Approved :: BSD License
 Description-Content-Type: text/markdown
-Requires-Dist: zenlog ==1.1
-Requires-Dist: platformdirs ==3.11
-Requires-Dist: psutil ==5.9.6
-Requires-Dist: mccode-antlr[hdf5] ==0.5.3
+Requires-Dist: zenlog >=1.1
+Requires-Dist: platformdirs >=3.11
+Requires-Dist: psutil >=5.9.6
+Requires-Dist: mccode-antlr[hdf5] ==0.7.0
 Requires-Dist: importlib-metadata ; python_version < "3.8"
 Provides-Extra: test
 Requires-Dist: pytest ; extra == 'test'
restage-0.2.4.dist-info/RECORD → restage-0.3.1.dist-info/RECORD RENAMED
@@ -1,19 +1,19 @@
 restage/__init__.py,sha256=v0dAUYVkvzjd3j6gjFdbunV_P8U9XxsGgLFGwbxBy6E,787
 restage/bifrost_choppers.py,sha256=aeAw4JgkGPSewU-mqGlGqo1AmN2dmf6FP9u4VDx-eLo,6338
-restage/cache.py,sha256=OAJCOzm6cgPm-SEoG53hzdS7rWUG-qebKZxw_n_xIfQ,8157
+restage/cache.py,sha256=Z60tn372Z_KlXC4e0UqMPPuM0NR8e6eD3aNbdR_q4rM,8374
 restage/cspec_choppers.py,sha256=ZWxyCcwYn4z9ZNqj_r6RC9ImbhVjYc1fmv-Ijm8A2Yk,206
 restage/database.py,sha256=pblHu8hCV5u3uyE8aUrnBSsfjDLYrxy9JRtnRuOvTXQ,9152
 restage/emulate.py,sha256=VrhfZJIbECdbDS-MHklqRuAIy9cRkjZkwPBTKQSQoe0,6164
 restage/energy.py,sha256=ltuMfByx3uwU8IBBriENbIH5EEfKdZ1-w84Urkcs0xM,2664
 restage/instr.py,sha256=G8iaXbgN0JKgv37S1oDuf0mEl2WP9VFuwAGeA0yw0R4,2412
-restage/mcpl.py,sha256=0F9A_edwSR9awQipicI3ipsMAJFk9zCspJhCOrmhkDg,2407
+restage/mcpl.py,sha256=BZYxBytughjc8slR6gUaBy3D7gzo7Yl3ACXrXhWgagI,3403
 restage/range.py,sha256=TjOf4DSKfgoAIcrWQvv6MrtksQpnGJHdsEjVI5K-UfI,8116
 restage/run.py,sha256=nk8d7cIyIqSt-5pyGm68Zak5H1a-fbo_z2_36eN-08E,1481
 restage/scan.py,sha256=Yx8OQSBG6I2_64sW0LIDb0glVKwWoxUQQznASXgDZFQ,1432
-restage/splitrun.py,sha256=QXmQOZ14EUdhBOzLwX1djTg9kPmxqi0bZJHQWsb3mhs,23339
-restage/tables.py,sha256=VMApcu4KtsBlUvAqCSKHDZ58FXhNPihr90OU12YAejw,15764
-restage-0.2.4.dist-info/METADATA,sha256=cv4eP6UBB5bMu0xFnwgKBTf-oUqWXLP9s7K304ETjNM,4773
-restage-0.2.4.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
-restage-0.2.4.dist-info/entry_points.txt,sha256=dY3ZanXW6rBLd2vBo-WMcbgO7E10GVDE0t9GASuVjgY,90
-restage-0.2.4.dist-info/top_level.txt,sha256=iM_pb-taTZ0S2WMoDnt_qDMZoNMjmM19z3tTCuVm1IE,8
-restage-0.2.4.dist-info/RECORD,,
+restage/splitrun.py,sha256=PGWfiyVMDLpHkbc_L78sutBhunAN3e5bGijm_rWuDfk,24938
+restage/tables.py,sha256=FJ-8syvzgjdLvvJW9hVK4VV_rNiZhW34-4BMKQsGjtU,16034
+restage-0.3.1.dist-info/METADATA,sha256=pvRu9I6iVkvMP99RXZG7uXEzNdIFXdis3_TB8Epv29k,4773
+restage-0.3.1.dist-info/WHEEL,sha256=UvcQYKBHoFqaQd6LKyqHw9fxEolWLQnlzP0h_LgJAfI,91
+restage-0.3.1.dist-info/entry_points.txt,sha256=dY3ZanXW6rBLd2vBo-WMcbgO7E10GVDE0t9GASuVjgY,90
+restage-0.3.1.dist-info/top_level.txt,sha256=iM_pb-taTZ0S2WMoDnt_qDMZoNMjmM19z3tTCuVm1IE,8
+restage-0.3.1.dist-info/RECORD,,
restage-0.2.4.dist-info/WHEEL → restage-0.3.1.dist-info/WHEEL RENAMED
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: bdist_wheel (0.42.0)
+Generator: setuptools (74.0.0)
 Root-Is-Purelib: true
 Tag: py3-none-any