rbx.cp 0.5.40-py3-none-any.whl → 0.5.45-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. rbx/box/builder.py +6 -6
  2. rbx/box/checkers.py +100 -25
  3. rbx/box/cli.py +868 -0
  4. rbx/box/code.py +272 -84
  5. rbx/box/contest/statements.py +4 -2
  6. rbx/box/generators.py +55 -49
  7. rbx/box/generators_test.py +7 -7
  8. rbx/box/main.py +1 -868
  9. rbx/box/package.py +57 -2
  10. rbx/box/packaging/boca/packager.py +2 -1
  11. rbx/box/packaging/main.py +17 -9
  12. rbx/box/packaging/moj/packager.py +49 -10
  13. rbx/box/retries.py +5 -5
  14. rbx/box/schema.py +20 -4
  15. rbx/box/solutions.py +46 -108
  16. rbx/box/solutions_test.py +5 -6
  17. rbx/box/state.py +1 -0
  18. rbx/box/statements/build_statements.py +4 -2
  19. rbx/box/stresses.py +23 -12
  20. rbx/box/tasks.py +277 -0
  21. rbx/box/testcase_extractors.py +21 -21
  22. rbx/box/testcases/main.py +19 -14
  23. rbx/box/unit.py +10 -7
  24. rbx/box/validators.py +10 -10
  25. rbx/box/validators_test.py +3 -3
  26. rbx/grading/judge/cacher.py +0 -4
  27. rbx/grading/judge/digester.py +0 -3
  28. rbx/grading/judge/sandbox.py +15 -0
  29. rbx/grading/judge/sandboxes/stupid_sandbox.py +20 -6
  30. rbx/grading/judge/sandboxes/timeit.py +117 -7
  31. rbx/grading/judge/storage.py +0 -4
  32. rbx/grading/steps.py +76 -2
  33. rbx/grading/steps_with_caching.py +45 -3
  34. rbx/grading/steps_with_caching_run_test.py +51 -49
  35. rbx/main.py +0 -4
  36. rbx/resources/packagers/moj/scripts/compare.sh +25 -6
  37. rbx/test.py +6 -4
  38. {rbx_cp-0.5.40.dist-info → rbx_cp-0.5.45.dist-info}/METADATA +2 -2
  39. {rbx_cp-0.5.40.dist-info → rbx_cp-0.5.45.dist-info}/RECORD +42 -55
  40. {rbx_cp-0.5.40.dist-info → rbx_cp-0.5.45.dist-info}/WHEEL +1 -1
  41. rbx/testdata/box1/gen1.cpp +0 -7
  42. rbx/testdata/box1/gen2.cpp +0 -9
  43. rbx/testdata/box1/genScript.py +0 -2
  44. rbx/testdata/box1/hard-tle.sol.cpp +0 -26
  45. rbx/testdata/box1/ole.cpp +0 -17
  46. rbx/testdata/box1/problem.rbx.yml +0 -39
  47. rbx/testdata/box1/re.sol.cpp +0 -23
  48. rbx/testdata/box1/sol.cpp +0 -22
  49. rbx/testdata/box1/tests/1.in +0 -1
  50. rbx/testdata/box1/tle-and-incorrect.sol.cpp +0 -33
  51. rbx/testdata/box1/tle.sol.cpp +0 -35
  52. rbx/testdata/box1/validator.cpp +0 -11
  53. rbx/testdata/box1/wa.sol.cpp +0 -22
  54. rbx/testdata/caching/executable.py +0 -1
  55. rbx/testdata/compatible +0 -0
  56. {rbx_cp-0.5.40.dist-info → rbx_cp-0.5.45.dist-info}/LICENSE +0 -0
  57. {rbx_cp-0.5.40.dist-info → rbx_cp-0.5.45.dist-info}/entry_points.txt +0 -0
rbx/box/unit.py CHANGED
@@ -1,5 +1,7 @@
 from typing import List, Optional
 
+import syncer
+
 from rbx import console
 from rbx.box import checkers, package, validators
 from rbx.box.schema import CodeItem, Testcase, ValidatorOutcome, ValidatorTest
@@ -13,7 +15,7 @@ def _get_validator_for_test(test: ValidatorTest) -> Optional[CodeItem]:
     return pkg.validator
 
 
-def run_validator_unit_tests(progress: StatusProgress):
+async def run_validator_unit_tests(progress: StatusProgress):
     pkg = package.find_problem_package_or_die()
 
     vals: List[CodeItem] = []
@@ -38,7 +40,7 @@ def run_validator_unit_tests(progress: StatusProgress):
             continue
 
         compiled_digest = compiled_validators[str(val.path)]
-        info = validators.validate_one_off(
+        info = await validators.validate_one_off(
             test.input,
             val,
             compiled_digest,
@@ -61,7 +63,7 @@ def run_validator_unit_tests(progress: StatusProgress):
         console.console.print(f'  [status]Actual[/status] {info.message}')
 
 
-def run_checker_unit_tests(progress: StatusProgress):
+async def run_checker_unit_tests(progress: StatusProgress):
     pkg = package.find_problem_package_or_die()
     if not pkg.unitTests.checker:
         return
@@ -82,7 +84,7 @@ def run_checker_unit_tests(progress: StatusProgress):
     empty_file = package.get_empty_sentinel_path()
 
     for i, test in enumerate(pkg.unitTests.checker):
-        result = checkers.check(
+        result = await checkers.check(
            compiled_digest,
            run_log=None,
            testcase=Testcase(
@@ -108,6 +110,7 @@ def run_checker_unit_tests(progress: StatusProgress):
         console.console.print(f'  [status]Message[/status] {result.message}')
 
 
-def run_unit_tests(progress: StatusProgress):
-    run_validator_unit_tests(progress)
-    run_checker_unit_tests(progress)
+@syncer.sync
+async def run_unit_tests(progress: StatusProgress):
+    await run_validator_unit_tests(progress)
+    await run_checker_unit_tests(progress)
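Note on the change above: `@syncer.sync` keeps `run_unit_tests` callable from synchronous CLI code while everything underneath becomes a coroutine. A minimal sketch of the same bridging pattern, assuming only the third-party `syncer` package (which drives the wrapped coroutine to completion on an event loop when called synchronously):

import asyncio

import syncer


async def _check() -> str:
    await asyncio.sleep(0)  # stand-in for awaiting validators/checkers
    return 'ok'


@syncer.sync
async def run_all() -> str:
    # Awaits the async pipeline internally...
    return await _check()


# ...but callers invoke it like a plain function, with no event loop in sight.
assert run_all() == 'ok'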
rbx/box/validators.py CHANGED
@@ -86,7 +86,7 @@ def _has_group_specific_validator() -> bool:
     return any(group.validator is not None for group in pkg.testcases)
 
 
-def _validate_testcase(
+async def _validate_testcase(
     testcase: pathlib.Path,
     validator: CodeItem,
     validator_digest: str,
@@ -103,7 +103,7 @@ def _validate_testcase(
 
     message_digest = DigestHolder()
    log_digest = DigestHolder()
-    run_log = run_item(
+    run_log = await run_item(
        validator,
        DigestOrSource.create(validator_digest),
        stdin=DigestOrSource.create(testcase),
@@ -140,13 +140,13 @@ def _validate_testcase(
     )
 
 
-def _validate_test(
+async def _validate_test(
     testcase: pathlib.Path,
     validator: CodeItem,
     validator_digest: str,
 ) -> Tuple[bool, Optional[str], HitBounds]:
     pkg = package.find_problem_package_or_die()
-    return _validate_testcase(
+    return await _validate_testcase(
         testcase, validator, validator_digest, vars=pkg.expanded_vars
     )
 
@@ -159,12 +159,12 @@ def compile_main_validator() -> Optional[Tuple[CodeItem, str]]:
     return pkg.validator, _compile_validator(pkg.validator)
 
 
-def validate_one_off(
+async def validate_one_off(
     testcase: pathlib.Path,
     validator: CodeItem,
     validator_digest: str,
 ) -> TestcaseValidationInfo:
-    ok, message, _ = _validate_test(testcase, validator, validator_digest)
+    ok, message, _ = await _validate_test(testcase, validator, validator_digest)
     info = TestcaseValidationInfo(
         validator=validator,
         group='interactive',
@@ -211,7 +211,7 @@ def compile_validators_for_entries(
     return compile_validators(validators, progress=progress)
 
 
-def validate_testcases(
+async def validate_testcases(
     progress: Optional[StatusProgress] = None,
     groups: Optional[Set[str]] = None,
 ) -> List[TestcaseValidationInfo]:
@@ -219,7 +219,7 @@
     if progress is not None:
         progress.step()
 
-    validation_entries = extract_generation_testcases_from_groups(groups)
+    validation_entries = await extract_generation_testcases_from_groups(groups)
     validator_to_compiled_digest = compile_validators_for_entries(
         validation_entries, progress=progress
     )
@@ -234,7 +234,7 @@
         # Main validation.
         if entry.validator is not None:
             compiled_digest = validator_to_compiled_digest[str(entry.validator.path)]
-            ok, message, hit_bounds = _validate_test(
+            ok, message, hit_bounds = await _validate_test(
                input_path, entry.validator, compiled_digest
            )
            validation_info.append(
@@ -250,7 +250,7 @@
 
         for extra_validator in entry.extra_validators:
             compiled_digest = validator_to_compiled_digest[str(extra_validator.path)]
-            ok, message, hit_bounds = _validate_test(
+            ok, message, hit_bounds = await _validate_test(
                input_path, extra_validator, compiled_digest
            )
            validation_info.append(
rbx/box/validators_test.py CHANGED
@@ -7,9 +7,9 @@ from rbx.box.validators import validate_testcases
 
 
 @pytest.mark.test_pkg('box1')
-def test_validators(pkg_from_testdata: pathlib.Path):
-    generate_testcases()
-    validation_infos = validate_testcases()
+async def test_validators(pkg_from_testdata: pathlib.Path):
+    await generate_testcases()
+    validation_infos = await validate_testcases()
 
     for info in validation_infos:
         assert info.ok
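Turning `test_validators` into an `async def` requires an async-aware pytest plugin to collect and run it; which plugin this project uses is not visible in the diff. A hedged sketch assuming pytest-asyncio:

import asyncio

import pytest


@pytest.mark.asyncio  # redundant when asyncio_mode = "auto" is configured
async def test_pipeline():
    await asyncio.sleep(0)  # stand-in for `await generate_testcases()`
    assert True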
rbx/grading/judge/cacher.py CHANGED
@@ -9,8 +9,6 @@ import tempfile
 import typing
 from typing import IO, List, Optional
 
-import gevent
-
 from rbx.grading.judge import digester, storage
 
 logger = logging.getLogger(__name__)
@@ -318,8 +316,6 @@ class FileCacher:
                 d.update(buf)
             while len(buf) > 0:
                 written = dst.write(buf)
-                # Cooperative yield.
-                gevent.sleep(0)
                 if written is None:
                     break
                 buf = buf[written:]
rbx/grading/judge/digester.py CHANGED
@@ -2,8 +2,6 @@ import hashlib
 import pathlib
 from typing import IO
 
-import gevent
-
 
 class Digester:
     """Simple wrapper of hashlib using our preferred hasher."""
@@ -26,7 +24,6 @@ def digest_cooperatively_into_digester(
     buf = f.read(chunk_size)
     while len(buf) > 0:
         digester.update(buf)
-        gevent.sleep(0)
         buf = f.read(chunk_size)
 
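The `gevent.sleep(0)` calls removed here (and in cacher.py above) were cooperative yields for gevent's scheduler. With the move to asyncio elsewhere in this release, blocking work is instead pushed onto a worker thread, so the hot loops can stay plain. A sketch of that replacement pattern under this assumption (the helper below is illustrative, not part of the package):

import asyncio
import hashlib
import pathlib


def _digest_blocking(path: pathlib.Path, chunk_size: int = 2**20) -> str:
    # A plain blocking loop: no per-chunk cooperative yield needed.
    h = hashlib.sha256()
    with path.open('rb') as f:
        while chunk := f.read(chunk_size):
            h.update(chunk)
    return h.hexdigest()


async def digest(path: pathlib.Path) -> str:
    # The event loop stays responsive while the hash runs on a thread.
    return await asyncio.to_thread(_digest_blocking, path)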
rbx/grading/judge/sandbox.py CHANGED
@@ -1,4 +1,5 @@
 import abc
+import collections
 import dataclasses
 import io
 import logging
@@ -112,6 +113,13 @@ class SandboxParams(pydantic.BaseModel):
     timeout: Optional[int] = None  # ms
     wallclock_timeout: Optional[int] = None  # ms
     extra_timeout: Optional[int] = None  # ms
+    reverse_io: bool = False
+
+    # For timeit
+    timeit_dups: Dict[str, List[pathlib.Path]] = dataclasses.field(
+        default_factory=lambda: collections.defaultdict(list)
+    )
+    timeit_prefix: Optional[str] = None
 
     def get_cacheable_params(self) -> Dict[str, Any]:
         return self.model_dump(mode='json', exclude_unset=True, exclude_none=True)
@@ -393,6 +401,13 @@ class SandboxBase(abc.ABC):
             return None
         return real_path
 
+    def create_fifo(self, path: pathlib.Path, override: bool = False):
+        real_path = self.relative_path(path)
+        if override:
+            real_path.unlink(missing_ok=True)
+        os.mkfifo(str(real_path))
+        return real_path
+
     def create_file_from_storage(
         self,
         path: pathlib.Path,
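The new `create_fifo` is a thin wrapper over `os.mkfifo`, which creates a named pipe: opening it for reading blocks until a writer connects, and vice versa, which is the property that lets an interactor and a solution talk through the sandbox filesystem. A self-contained sketch of that handshake (paths and messages are illustrative):

import os
import tempfile

with tempfile.TemporaryDirectory() as d:
    fifo = os.path.join(d, 'pipe')
    os.mkfifo(fifo)

    if os.fork() == 0:
        # Child: the writer side.
        with open(fifo, 'w') as w:
            w.write('hello\n')
        os._exit(0)

    # Parent: open() blocks until the child opens the write end.
    with open(fifo) as r:
        assert r.readline() == 'hello\n'
    os.wait()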
rbx/grading/judge/sandboxes/stupid_sandbox.py CHANGED
@@ -91,16 +91,30 @@ class StupidSandbox(SandboxBase):
             args.append(f'-w{walltimeout_in_s:.3f}')
         if self.params.address_space:
             args.append(f'-m{self.params.address_space}')
-        if self.params.stdin_file:
-            args.append(f'-i{self.params.stdin_file}')
-        if self.params.stdout_file:
-            args.append(f'-o{self.params.stdout_file}')
-        if self.params.stderr_file:
-            args.append(f'-e{self.params.stderr_file}')
         if self.params.fsize:
             args.append(f'-f{self.params.fsize}')
         if self.chdir:
             args.append(f'-c{self.chdir}')
+
+        file_args = []
+        if self.params.stdin_file:
+            file_args.append(f'-i{self.params.stdin_file}')
+        if self.params.stdout_file:
+            file_args.append(f'-o{self.params.stdout_file}')
+        if self.params.stderr_file:
+            file_args.append(f'-e{self.params.stderr_file}')
+        if self.params.reverse_io:
+            file_args.reverse()
+        args.extend(file_args)
+
+        if self.params.timeit_dups:
+            for i, files in self.params.timeit_dups.items():
+                assert i.lower() in ['di', 'do', 'de']
+                for file in files:
+                    args.append(f'-{i}{file}')
+        if self.params.timeit_prefix:
+            args.append(f'-P{self.params.timeit_prefix}')
+
         return args
 
     def get_root_path(self) -> pathlib.Path:
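With this change the stdin/stdout/stderr flags are grouped so they can be emitted in reverse order (`reverse_io`, presumably so the two ends of a FIFO pair get opened in a compatible order), and the new `timeit_dups`/`timeit_prefix` params are forwarded as `-di`/`-do`/`-de` and `-P` flags. Roughly, the extra argv produced might look like this (values are illustrative, not from the diff):

# Hypothetical SandboxParams -> timeit argv translation, mirroring the builder above.
args = ['-t2.000', '-w10.000', '-m256']
args += ['-oout.txt', '-iin.txt']            # reversed because reverse_io=True
args += ['-dointeraction.txt', '-P[sol] ']   # duplicate stdout; prefix mirrored lines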
rbx/grading/judge/sandboxes/timeit.py CHANGED
@@ -6,10 +6,10 @@ import signal
 import stat
 import sys
 from time import monotonic
-from typing import List, Optional
+from typing import Any, Dict, List, Optional, Set, Union
 
 
-@dataclasses.dataclass()
+@dataclasses.dataclass
 class Options:
     output_file: str
     argv: List[str]
@@ -21,14 +21,100 @@ class Options:
     wall_time_limit: Optional[float] = None  # seconds
     memory_limit: Optional[int] = None  # kb, but passed in args as mb
     fs_limit: Optional[int] = None  # kb
+    files_to_open: List[int] = dataclasses.field(default_factory=list)
+    file_duplicates: Dict[int, List[str]] = dataclasses.field(default_factory=dict)
+    prefixed: Set[str] = dataclasses.field(default_factory=set)
+    prefix: str = ''
 
 
 def exit_with(code: int):
     sys.exit(code)
 
 
+def get_tee_command(files: List[str]) -> str:
+    path = (
+        os.path.join(os.path.dirname(os.path.realpath(__file__)), 'tee.py')
+        + ' '
+        + ' '.join(files)
+    )
+    return sys.executable + ' ' + path
+
+
+valid_modes = ['a', 'w']
+
+
+@dataclasses.dataclass
+class Tee:
+    file: Any
+    prefix: Union[str, bytes] = ''
+
+
+def create_tee(files, mode, buffer_size=4096, prefix=''):
+    """Get a file object that will mirror writes across multiple files objs
+
+    Options:
+        files       A list of files and/or file objects. All strings will be
+                    treated as file paths and opened for writing. Everything
+                    else is assumed to be a file-like object that implements
+                    both the write() and flush() methods.
+
+        mode        Which mode to use when opening new files. Valid values
+                    are 'a' (append) and 'w' (overwrite).
+
+        buffer_size
+                    Control the size of the buffer between writes to the
+                    resulting file object and the list of files.
+    """
+    if mode not in valid_modes:
+        raise IOError(
+            'Only valid modes to create_tee() are: %s' % ', '.join(valid_modes)
+        )
+
+    tee_list = []
+    for file in files:
+        if isinstance(file, Tee):
+            tee_list.append(file)
+        else:
+            tee_list.append(Tee(file))
+    for tee in tee_list:
+        if isinstance(tee.file, str):
+            tee.file = open(tee.file, f'{mode}b')
+        if isinstance(tee.prefix, str):
+            tee.prefix = tee.prefix.encode()
+
+    pipe_read, pipe_write = os.pipe()
+    pid = os.fork()
+    if pid == 0:
+        # Child -- Read bytes from the pipe and write them to the specified
+        # files.
+        try:
+            # Close parent's end of the pipe
+            os.close(pipe_write)
+
+            bytes = os.read(pipe_read, buffer_size)
+            while bytes:
+                for tee in tee_list:
+                    if tee.prefix:
+                        tee.file.write(tee.prefix)
+                    tee.file.write(bytes)
+                    tee.file.flush()
+                    # TODO maybe add in fsync() here if the fileno() method
+                    # exists on file
+
+                bytes = os.read(pipe_read, buffer_size)
+        except Exception:
+            pass
+        finally:
+            os._exit(255)
+    else:
+        # Parent -- Return a file object wrapper around the pipe to the
+        # child.
+        return os.fdopen(pipe_write, 'w', closefd=False)
+
+
 def parse_opts() -> Options:
     options = Options(output_file=sys.argv[1], argv=[])
+    options.files_to_open = []
     num_opts = 0
     while num_opts + 2 < len(sys.argv) and sys.argv[num_opts + 2].startswith('-'):
         # Process option
@@ -41,14 +127,28 @@ def parse_opts() -> Options:
             options.memory_limit = int(opt[2:]) * 1024
         elif opt.startswith('-i'):
             options.stdin_file = opt[2:]
+            options.files_to_open.append(0)
         elif opt.startswith('-o'):
             options.stdout_file = opt[2:]
+            options.files_to_open.append(1)
         elif opt.startswith('-e'):
             options.stderr_file = opt[2:]
+            options.files_to_open.append(2)
+        elif opt.startswith('-d') or opt.startswith('-D'):
+            is_prefixed = opt.startswith('-D')
+            possibilities = [None, 'o', 'e']
+            index = possibilities.index(opt[2])
+            if index not in options.file_duplicates:
+                options.file_duplicates[index] = []
+            options.file_duplicates[index].append(opt[3:])
+            if is_prefixed:
+                options.prefixed.add(opt[3:])
         elif opt.startswith('-c'):
             options.chdir = opt[2:]
         elif opt.startswith('-f'):
             options.fs_limit = int(opt[2:])
+        elif opt.startswith('-P'):
+            options.prefix = opt[2:]
         else:
             raise Exception(f'Invalid option {opt}')
         num_opts += 1
@@ -92,7 +192,8 @@ def set_rlimits(options: Options):
 def redirect_fds(options: Options):
     files = [options.stdin_file, options.stdout_file, options.stderr_file]
 
-    for i, file in enumerate(files):
+    for i in options.files_to_open:
+        file = files[i]
         if file is None:
             continue
         open_args = [
@@ -102,10 +203,18 @@ def redirect_fds(options: Options):
         if i == 0:
             # stdin
             open_args = [os.O_RDONLY]
-        fd = os.open(
-            file,
-            *open_args,
-        )
+        if i in options.file_duplicates:
+            dups = [
+                Tee(f, prefix=options.prefix if f in options.prefixed else '')
+                for f in options.file_duplicates[i]
+            ]
+            tee = create_tee(dups + [file], 'a', prefix=options.prefix)
+            fd = tee.fileno()
+        else:
+            fd = os.open(
+                file,
+                *open_args,
+            )
         os.dup2(fd, i)
         os.close(fd)
 
@@ -215,3 +324,4 @@ def main():
 
 if __name__ == '__main__':
     main()
+# type: ignore
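A usage sketch for the `create_tee` added above: the parent writes to the returned pipe-backed file object while a forked child mirrors every chunk into each sink, optionally prepending a per-sink prefix (file names here are illustrative):

import sys

# Everything written to `out` is fanned out to run.log (prefixed) and stderr.
out = create_tee([Tee('run.log', prefix='[run] '), sys.stderr.buffer], 'a')
out.write('solution started\n')
out.flush()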
rbx/grading/judge/storage.py CHANGED
@@ -6,8 +6,6 @@ import tempfile
 from abc import ABC, abstractmethod
 from typing import IO, AnyStr, List, Optional
 
-import gevent
-
 logger = logging.getLogger(__name__)
 
 TOMBSTONE = 'x'
@@ -40,11 +38,9 @@ def copyfileobj(
     if maxlen > 0 and maxlen < len(buffer):
         buffer = buffer[:maxlen]
     while len(buffer) > 0:
-        gevent.sleep(0)
         written = destination_fobj.write(buffer)
         buffer = buffer[written:]
         maxlen -= written
-    gevent.sleep(0)
 
 
 @dataclasses.dataclass
rbx/grading/steps.py CHANGED
@@ -1,10 +1,16 @@
+import asyncio
+import contextlib
+import dataclasses
 import functools
+import os
 import pathlib
 import re
 import shlex
 import shutil
 import subprocess
 import sys
+import tempfile
+import typing
 from enum import Enum
 from typing import IO, Any, Dict, Iterable, List, Optional, Tuple, Union
 
@@ -123,6 +129,8 @@ class GradingFileOutput(BaseModel):
     intermediate: bool = False
     # Whether to track file through its hash (disable for optimization).
     hash: bool = True
+    # Whether to touch the file before the command runs.
+    touch: bool = False
 
     def get_file(self, storage: Storage) -> Optional[IO[bytes]]:
         if self.dest is not None:
@@ -136,6 +144,15 @@ class GradingFileOutput(BaseModel):
         raise ValueError('No file to get')
 
 
+class GradingFifo(BaseModel):
+    # Destination path relative to the sandbox.
+    path: pathlib.Path
+    # Symlink to the FIFO outside the sandbox.
+    symlink: Optional[pathlib.Path] = None
+    # Whether to create the FIFO if it does not exist.
+    create: bool = True
+
+
 class GradingArtifacts(BaseModel):
     # Root directory for the produced artifacts.
     root: pathlib.Path = pathlib.PosixPath('.')
@@ -143,6 +160,8 @@ class GradingArtifacts(BaseModel):
     inputs: List[GradingFileInput] = []
     # List of output files to copy from the sandbox.
     outputs: List[GradingFileOutput] = []
+    # List of FIFOs
+    fifos: List[GradingFifo] = []
     # Capture certain logs of the execution.
     logs: Optional[GradingLogsHolder] = None
 
@@ -241,6 +260,14 @@ def _process_input_artifacts(artifacts: GradingArtifacts, sandbox: SandboxBase):
             override=True,
             try_symlink=True,
         )
+    for output_artifact in artifacts.outputs:
+        if output_artifact.touch:
+            sandbox.create_file_from_string(
+                output_artifact.src,
+                '',
+                executable=output_artifact.executable,
+                override=True,
+            )
 
 
 def _process_output_artifacts(
@@ -278,6 +305,14 @@ def _process_output_artifacts(
     return True
 
 
+def _process_fifos(artifacts: GradingArtifacts, sandbox: SandboxBase):
+    for fifo in artifacts.fifos:
+        if fifo.symlink is not None:
+            sandbox.create_symlink(fifo.path, fifo.symlink, override=True)
+        else:
+            sandbox.create_fifo(fifo.path, override=True)
+
+
 def testlib_grading_input() -> GradingFileInput:
     return GradingFileInput(src=get_testlib(), dest=pathlib.Path('testlib.h'))
 
@@ -553,7 +588,7 @@ def compile(
     return _process_output_artifacts(artifacts, sandbox)
 
 
-def run(
+async def run(
     command: str,
     params: SandboxParams,
     sandbox: SandboxBase,
@@ -561,10 +596,11 @@
     metadata: Optional[RunLogMetadata] = None,
 ) -> Optional[RunLog]:
     _process_input_artifacts(artifacts, sandbox)
+    _process_fifos(artifacts, sandbox)
     cmd = _split_and_expand(command, sandbox)
     sandbox.set_params(params)
 
-    if not sandbox.execute_without_std(cmd):
+    if not await asyncio.to_thread(sandbox.execute_without_std, cmd):
         console.print(
             '[error]Sandbox crashed while processing command:[/error]',
             utils.highlight_json_obj(cmd),
@@ -600,6 +636,34 @@ def run(
     return run_log
 
 
+@dataclasses.dataclass
+class CoordinatedRunParams:
+    command: str
+    params: SandboxParams
+    sandbox: SandboxBase
+    artifacts: GradingArtifacts
+    metadata: Optional[RunLogMetadata] = None
+
+
+async def run_coordinated(
+    interactor: CoordinatedRunParams,
+    solution: CoordinatedRunParams,
+) -> Tuple[Optional[RunLog], Optional[RunLog]]:
+    runs = tuple(
+        run(
+            params.command,
+            params.params,
+            params.sandbox,
+            params.artifacts,
+            params.metadata,
+        )
+        for params in [interactor, solution]
+    )
+    return typing.cast(
+        Tuple[Optional[RunLog], Optional[RunLog]], tuple(await asyncio.gather(*runs))
+    )
+
+
 def _normalize_checked_words(s: str) -> Tuple[str, ...]:
     return tuple(s.split())
 
@@ -722,3 +786,13 @@ def evaluate(
         log=log,
         result=checker_result,
     )
+
+
+@contextlib.contextmanager
+def make_fifos():
+    with tempfile.TemporaryDirectory() as temp_dir:
+        fifo_in = pathlib.PosixPath(temp_dir) / 'fifo.in'
+        fifo_out = pathlib.PosixPath(temp_dir) / 'fifo.out'
+        os.mkfifo(fifo_in)
+        os.mkfifo(fifo_out)
+        yield fifo_in, fifo_out
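The concurrency shape behind the new `run_coordinated`: each `run()` now parks its blocking `execute_without_std` call on a worker thread via `asyncio.to_thread`, so `asyncio.gather` genuinely overlaps the interactor and the solution, which matters once the two processes are joined by the FIFOs from `make_fifos`. The same shape in isolation (names and sleep times are illustrative):

import asyncio
import time


async def _run(name: str, seconds: float) -> str:
    # Stand-in for a blocking sandbox call parked on a worker thread.
    await asyncio.to_thread(time.sleep, seconds)
    return name


async def main():
    # Both sides make progress at once, like interactor and solution.
    logs = await asyncio.gather(_run('interactor', 0.1), _run('solution', 0.1))
    print(logs)  # ['interactor', 'solution']


asyncio.run(main())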
rbx/grading/steps_with_caching.py CHANGED
@@ -1,4 +1,4 @@
-from typing import List, Optional
+from typing import Any, Dict, List, Optional, Tuple
 
 from rbx.grading import steps
 from rbx.grading.caching import DependencyCache, NoCacheException
@@ -11,6 +11,12 @@ from rbx.grading.steps import (
 )
 
 
+def _get_prefixed_cacheable_params(
+    params: Dict[str, Any], prefix: str
+) -> Dict[str, Any]:
+    return {f'{prefix}.{k}': v for k, v in params.items()}
+
+
 def compile(
     commands: List[str],
     params: SandboxParams,
@@ -36,7 +42,7 @@ def compile(
     return ok
 
 
-def run(
+async def run(
     command: str,
     params: SandboxParams,
     sandbox: SandboxBase,
@@ -52,7 +58,7 @@ def run(
 
     with dependency_cache([command], [artifacts], cacheable_params) as is_cached:
         if not is_cached:
-            steps.run(
+            await steps.run(
                 command=command,
                 params=params,
                 artifacts=artifacts,
@@ -61,3 +67,39 @@ def run(
             )
 
     return artifacts.logs.run
+
+
+async def run_coordinated(
+    interactor: steps.CoordinatedRunParams,
+    solution: steps.CoordinatedRunParams,
+    dependency_cache: DependencyCache,
+) -> Tuple[Optional[RunLog], Optional[RunLog]]:
+    interactor.artifacts.logs = GradingLogsHolder()
+    solution.artifacts.logs = GradingLogsHolder()
+
+    cacheable_params = {
+        **_get_prefixed_cacheable_params(
+            interactor.params.get_cacheable_params(), 'interactor'
+        ),
+        **_get_prefixed_cacheable_params(
+            solution.params.get_cacheable_params(), 'solution'
+        ),
+    }
+
+    if interactor.metadata is not None and interactor.metadata.retryIndex is not None:
+        cacheable_params['interactor.__retry_index__'] = interactor.metadata.retryIndex
+    if solution.metadata is not None and solution.metadata.retryIndex is not None:
+        cacheable_params['solution.__retry_index__'] = solution.metadata.retryIndex
+
+    with dependency_cache(
+        [interactor.command, solution.command],
+        [interactor.artifacts, solution.artifacts],
+        cacheable_params,
+    ) as is_cached:
+        if not is_cached:
+            await steps.run_coordinated(interactor, solution)
+
+    return (
+        interactor.artifacts.logs.run,
+        solution.artifacts.logs.run,
+    )