rbx.cp 0.5.40-py3-none-any.whl → 0.5.42-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- rbx/box/builder.py +6 -6
- rbx/box/checkers.py +100 -25
- rbx/box/cli.py +860 -0
- rbx/box/code.py +199 -84
- rbx/box/contest/statements.py +4 -2
- rbx/box/generators.py +55 -49
- rbx/box/generators_test.py +7 -7
- rbx/box/main.py +1 -864
- rbx/box/package.py +42 -1
- rbx/box/packaging/boca/packager.py +2 -1
- rbx/box/packaging/main.py +17 -9
- rbx/box/packaging/moj/packager.py +49 -10
- rbx/box/retries.py +5 -5
- rbx/box/schema.py +20 -4
- rbx/box/solutions.py +46 -108
- rbx/box/solutions_test.py +5 -6
- rbx/box/statements/build_statements.py +4 -2
- rbx/box/stresses.py +23 -12
- rbx/box/tasks.py +258 -0
- rbx/box/testcase_extractors.py +21 -21
- rbx/box/testcases/main.py +19 -14
- rbx/box/unit.py +10 -7
- rbx/box/validators.py +10 -10
- rbx/box/validators_test.py +3 -3
- rbx/grading/judge/sandbox.py +8 -0
- rbx/grading/judge/sandboxes/stupid_sandbox.py +12 -7
- rbx/grading/judge/sandboxes/timeit.py +8 -2
- rbx/grading/steps.py +76 -2
- rbx/grading/steps_with_caching.py +45 -3
- rbx/grading/steps_with_caching_run_test.py +51 -49
- rbx/resources/packagers/moj/scripts/compare.sh +25 -6
- rbx/test.py +6 -4
- rbx/testdata/interactive/checker.cpp +21 -0
- rbx/testdata/interactive/gen.cpp +11 -0
- rbx/testdata/interactive/interactor.cpp +63 -0
- rbx/testdata/interactive/problem.rbx.yml +40 -0
- rbx/testdata/interactive/sols/af_ac_pe.cpp +75 -0
- rbx/testdata/interactive/sols/af_ac_re.cpp +76 -0
- rbx/testdata/interactive/sols/af_ac_too_many_iter.cpp +72 -0
- rbx/testdata/interactive/sols/af_inf_cout_with_flush.cpp +79 -0
- rbx/testdata/interactive/sols/af_inf_cout_without_flush.cpp +78 -0
- rbx/testdata/interactive/sols/af_ml.cpp +78 -0
- rbx/testdata/interactive/sols/af_tl_after_ans.cpp +74 -0
- rbx/testdata/interactive/sols/af_wa.cpp +74 -0
- rbx/testdata/interactive/sols/interactive-binary-search_mm_naive_cin.cpp +17 -0
- rbx/testdata/interactive/sols/main.cpp +26 -0
- rbx/testdata/interactive/testplan.txt +6 -0
- rbx/testdata/interactive/validator.cpp +16 -0
- {rbx_cp-0.5.40.dist-info → rbx_cp-0.5.42.dist-info}/METADATA +2 -1
- {rbx_cp-0.5.40.dist-info → rbx_cp-0.5.42.dist-info}/RECORD +53 -35
- {rbx_cp-0.5.40.dist-info → rbx_cp-0.5.42.dist-info}/LICENSE +0 -0
- {rbx_cp-0.5.40.dist-info → rbx_cp-0.5.42.dist-info}/WHEEL +0 -0
- {rbx_cp-0.5.40.dist-info → rbx_cp-0.5.42.dist-info}/entry_points.txt +0 -0
rbx/box/unit.py CHANGED

```diff
@@ -1,5 +1,7 @@
 from typing import List, Optional
 
+import syncer
+
 from rbx import console
 from rbx.box import checkers, package, validators
 from rbx.box.schema import CodeItem, Testcase, ValidatorOutcome, ValidatorTest
@@ -13,7 +15,7 @@ def _get_validator_for_test(test: ValidatorTest) -> Optional[CodeItem]:
     return pkg.validator
 
 
-def run_validator_unit_tests(progress: StatusProgress):
+async def run_validator_unit_tests(progress: StatusProgress):
     pkg = package.find_problem_package_or_die()
 
     vals: List[CodeItem] = []
@@ -38,7 +40,7 @@ def run_validator_unit_tests(progress: StatusProgress):
             continue
 
         compiled_digest = compiled_validators[str(val.path)]
-        info = validators.validate_one_off(
+        info = await validators.validate_one_off(
            test.input,
            val,
            compiled_digest,
@@ -61,7 +63,7 @@ def run_validator_unit_tests(progress: StatusProgress):
        console.console.print(f' [status]Actual[/status] {info.message}')
 
 
-def run_checker_unit_tests(progress: StatusProgress):
+async def run_checker_unit_tests(progress: StatusProgress):
     pkg = package.find_problem_package_or_die()
     if not pkg.unitTests.checker:
         return
@@ -82,7 +84,7 @@ def run_checker_unit_tests(progress: StatusProgress):
     empty_file = package.get_empty_sentinel_path()
 
     for i, test in enumerate(pkg.unitTests.checker):
-        result = checkers.check(
+        result = await checkers.check(
            compiled_digest,
            run_log=None,
            testcase=Testcase(
@@ -108,6 +110,7 @@ def run_checker_unit_tests(progress: StatusProgress):
        console.console.print(f' [status]Message[/status] {result.message}')
 
 
-
-
-
+@syncer.sync
+async def run_unit_tests(progress: StatusProgress):
+    await run_validator_unit_tests(progress)
+    await run_checker_unit_tests(progress)
```
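The net effect in this file is that the unit-test runners became coroutines, with a single synchronous wrapper (`run_unit_tests`, via `@syncer.sync`) left as the entry point for non-async callers. A minimal sketch of that bridging pattern, using a hand-rolled decorator in place of the `syncer` dependency (all names below are illustrative, not rbx's):

```python
import asyncio
import functools


def sync(coro_fn):
    """Run an async function to completion from synchronous code.

    Rough stand-in for syncer.sync; assumes no event loop is already running.
    """

    @functools.wraps(coro_fn)
    def wrapper(*args, **kwargs):
        return asyncio.run(coro_fn(*args, **kwargs))

    return wrapper


async def run_validator_unit_tests():
    await asyncio.sleep(0)  # placeholder for the awaited validator runs


async def run_checker_unit_tests():
    await asyncio.sleep(0)  # placeholder for the awaited checker runs


@sync
async def run_unit_tests():
    await run_validator_unit_tests()
    await run_checker_unit_tests()


if __name__ == '__main__':
    run_unit_tests()  # a plain synchronous call, as a CLI command would make
```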
rbx/box/validators.py CHANGED

```diff
@@ -86,7 +86,7 @@ def _has_group_specific_validator() -> bool:
     return any(group.validator is not None for group in pkg.testcases)
 
 
-def _validate_testcase(
+async def _validate_testcase(
     testcase: pathlib.Path,
     validator: CodeItem,
     validator_digest: str,
@@ -103,7 +103,7 @@ def _validate_testcase(
 
     message_digest = DigestHolder()
     log_digest = DigestHolder()
-    run_log = run_item(
+    run_log = await run_item(
         validator,
         DigestOrSource.create(validator_digest),
         stdin=DigestOrSource.create(testcase),
@@ -140,13 +140,13 @@ def _validate_testcase(
     )
 
 
-def _validate_test(
+async def _validate_test(
     testcase: pathlib.Path,
     validator: CodeItem,
     validator_digest: str,
 ) -> Tuple[bool, Optional[str], HitBounds]:
     pkg = package.find_problem_package_or_die()
-    return _validate_testcase(
+    return await _validate_testcase(
         testcase, validator, validator_digest, vars=pkg.expanded_vars
     )
 
@@ -159,12 +159,12 @@ def compile_main_validator() -> Optional[Tuple[CodeItem, str]]:
     return pkg.validator, _compile_validator(pkg.validator)
 
 
-def validate_one_off(
+async def validate_one_off(
     testcase: pathlib.Path,
     validator: CodeItem,
     validator_digest: str,
 ) -> TestcaseValidationInfo:
-    ok, message, _ = _validate_test(testcase, validator, validator_digest)
+    ok, message, _ = await _validate_test(testcase, validator, validator_digest)
     info = TestcaseValidationInfo(
         validator=validator,
         group='interactive',
@@ -211,7 +211,7 @@ def compile_validators_for_entries(
     return compile_validators(validators, progress=progress)
 
 
-def validate_testcases(
+async def validate_testcases(
     progress: Optional[StatusProgress] = None,
     groups: Optional[Set[str]] = None,
 ) -> List[TestcaseValidationInfo]:
@@ -219,7 +219,7 @@ def validate_testcases(
         if progress is not None:
             progress.step()
 
-    validation_entries = extract_generation_testcases_from_groups(groups)
+    validation_entries = await extract_generation_testcases_from_groups(groups)
     validator_to_compiled_digest = compile_validators_for_entries(
         validation_entries, progress=progress
     )
@@ -234,7 +234,7 @@ def validate_testcases(
         # Main validation.
         if entry.validator is not None:
             compiled_digest = validator_to_compiled_digest[str(entry.validator.path)]
-            ok, message, hit_bounds = _validate_test(
+            ok, message, hit_bounds = await _validate_test(
                 input_path, entry.validator, compiled_digest
             )
             validation_info.append(
@@ -250,7 +250,7 @@ def validate_testcases(
 
         for extra_validator in entry.extra_validators:
             compiled_digest = validator_to_compiled_digest[str(extra_validator.path)]
-            ok, message, hit_bounds = _validate_test(
+            ok, message, hit_bounds = await _validate_test(
                 input_path, extra_validator, compiled_digest
             )
             validation_info.append(
```
rbx/box/validators_test.py CHANGED

```diff
@@ -7,9 +7,9 @@ from rbx.box.validators import validate_testcases
 
 
 @pytest.mark.test_pkg('box1')
-def test_validators(pkg_from_testdata: pathlib.Path):
-    generate_testcases()
-    validation_infos = validate_testcases()
+async def test_validators(pkg_from_testdata: pathlib.Path):
+    await generate_testcases()
+    validation_infos = await validate_testcases()
 
     for info in validation_infos:
         assert info.ok
```
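With `test_validators` now declared `async def`, the suite needs an asyncio-aware pytest plugin to collect and run it; which plugin rbx configures is not visible in this diff. As a hypothetical illustration, this is how such a test looks under pytest-asyncio with an explicit marker:

```python
# Hypothetical standalone test, assuming pytest and pytest-asyncio are installed.
import asyncio

import pytest


@pytest.mark.asyncio
async def test_pipeline_runs():
    # Stand-in for the awaited generate_testcases() / validate_testcases() calls.
    await asyncio.sleep(0)
    assert True
```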
rbx/grading/judge/sandbox.py CHANGED

```diff
@@ -112,6 +112,7 @@ class SandboxParams(pydantic.BaseModel):
     timeout: Optional[int] = None  # ms
     wallclock_timeout: Optional[int] = None  # ms
     extra_timeout: Optional[int] = None  # ms
+    reverse_io: bool = False
 
     def get_cacheable_params(self) -> Dict[str, Any]:
         return self.model_dump(mode='json', exclude_unset=True, exclude_none=True)
@@ -393,6 +394,13 @@ class SandboxBase(abc.ABC):
             return None
         return real_path
 
+    def create_fifo(self, path: pathlib.Path, override: bool = False):
+        real_path = self.relative_path(path)
+        if override:
+            real_path.unlink(missing_ok=True)
+        os.mkfifo(str(real_path))
+        return real_path
+
     def create_file_from_storage(
         self,
         path: pathlib.Path,
```
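`create_fifo` is a thin wrapper over `os.mkfifo`, i.e. a named pipe inside the sandbox that an interactor and a solution can later open like regular files. A self-contained sketch of the primitive itself (not the sandbox API):

```python
import os
import pathlib
import subprocess
import tempfile

with tempfile.TemporaryDirectory() as tmp:
    fifo = pathlib.Path(tmp) / 'pipe'
    os.mkfifo(fifo)  # named pipe: open() for reading blocks until a writer appears

    # Background writer: opens the FIFO for writing and sends one line.
    writer = subprocess.Popen(['sh', '-c', f'echo hello > {fifo}'])

    # Reader side: this open() unblocks once the writer above connects.
    with open(fifo) as fh:
        print(fh.read().strip())  # prints: hello

    writer.wait()
```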
rbx/grading/judge/sandboxes/stupid_sandbox.py CHANGED

```diff
@@ -91,17 +91,22 @@ class StupidSandbox(SandboxBase):
             args.append(f'-w{walltimeout_in_s:.3f}')
         if self.params.address_space:
             args.append(f'-m{self.params.address_space}')
-        if self.params.stdin_file:
-            args.append(f'-i{self.params.stdin_file}')
-        if self.params.stdout_file:
-            args.append(f'-o{self.params.stdout_file}')
-        if self.params.stderr_file:
-            args.append(f'-e{self.params.stderr_file}')
         if self.params.fsize:
             args.append(f'-f{self.params.fsize}')
         if self.chdir:
             args.append(f'-c{self.chdir}')
-
+
+        file_args = []
+        if self.params.stdin_file:
+            file_args.append(f'-i{self.params.stdin_file}')
+        if self.params.stdout_file:
+            file_args.append(f'-o{self.params.stdout_file}')
+        if self.params.stderr_file:
+            file_args.append(f'-e{self.params.stderr_file}')
+        if self.params.reverse_io:
+            file_args.reverse()
+
+        return args + file_args
 
     def get_root_path(self) -> pathlib.Path:
         """Return the toplevel path of the sandbox.
```
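`reverse_io` only flips the order in which the `-i`/`-o`/`-e` flags are emitted, and therefore the order in which `timeit` opens those files. When both ends are FIFOs that ordering is what keeps the two processes from deadlocking in `open()`: each side has to open the pipe pair in complementary order. A toy reproduction of the flag ordering (hypothetical helper, not the sandbox's actual code path):

```python
def build_file_args(stdin_file, stdout_file, reverse_io):
    # Mirrors the shape of the change: collect the redirection flags,
    # then optionally reverse them for one side of the FIFO pair.
    file_args = []
    if stdin_file:
        file_args.append(f'-i{stdin_file}')
    if stdout_file:
        file_args.append(f'-o{stdout_file}')
    if reverse_io:
        file_args.reverse()
    return file_args


print(build_file_args('fifo.in', 'fifo.out', reverse_io=False))
# ['-ififo.in', '-ofifo.out']
print(build_file_args('fifo.in', 'fifo.out', reverse_io=True))
# ['-ofifo.out', '-ififo.in']
```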
rbx/grading/judge/sandboxes/timeit.py CHANGED

```diff
@@ -9,7 +9,7 @@ from time import monotonic
 from typing import List, Optional
 
 
-@dataclasses.dataclass
+@dataclasses.dataclass
 class Options:
     output_file: str
     argv: List[str]
@@ -21,6 +21,7 @@ class Options:
     wall_time_limit: Optional[float] = None  # seconds
     memory_limit: Optional[int] = None  # kb, but passed in args as mb
     fs_limit: Optional[int] = None  # kb
+    files_to_open: List[int] = dataclasses.field(default_factory=list)
 
 
 def exit_with(code: int):
@@ -29,6 +30,7 @@ def exit_with(code: int):
 
 def parse_opts() -> Options:
     options = Options(output_file=sys.argv[1], argv=[])
+    options.files_to_open = []
     num_opts = 0
     while num_opts + 2 < len(sys.argv) and sys.argv[num_opts + 2].startswith('-'):
         # Process option
@@ -41,10 +43,13 @@ def parse_opts() -> Options:
             options.memory_limit = int(opt[2:]) * 1024
         elif opt.startswith('-i'):
             options.stdin_file = opt[2:]
+            options.files_to_open.append(0)
         elif opt.startswith('-o'):
             options.stdout_file = opt[2:]
+            options.files_to_open.append(1)
         elif opt.startswith('-e'):
             options.stderr_file = opt[2:]
+            options.files_to_open.append(2)
         elif opt.startswith('-c'):
             options.chdir = opt[2:]
         elif opt.startswith('-f'):
@@ -92,7 +97,8 @@ def set_rlimits(options: Options):
 def redirect_fds(options: Options):
     files = [options.stdin_file, options.stdout_file, options.stderr_file]
 
-    for i, file in enumerate(files):
+    for i in options.files_to_open:
+        file = files[i]
         if file is None:
             continue
         open_args = [
```
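`files_to_open` remembers which of stdin/stdout/stderr were requested on the command line, and in what order, so `redirect_fds` only touches those descriptors and opens them in that order. The redirection itself is the standard `os.open` + `os.dup2` pattern; a minimal sketch assuming ordinary files rather than FIFOs:

```python
import os


def redirect_fds(files_to_open, files):
    """Redirect fds 0/1/2 to the given paths, in the requested order.

    files is a (stdin, stdout, stderr) tuple of paths or None.
    """
    for i in files_to_open:
        path = files[i]
        if path is None:
            continue
        flags = os.O_RDONLY if i == 0 else os.O_WRONLY | os.O_CREAT | os.O_TRUNC
        fd = os.open(path, flags, 0o644)
        os.dup2(fd, i)  # make descriptor i refer to the opened file
        os.close(fd)


if __name__ == '__main__':
    redirect_fds([1], (None, 'out.txt', None))
    print('redirected')  # ends up in out.txt
```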
rbx/grading/steps.py CHANGED

```diff
@@ -1,10 +1,16 @@
+import asyncio
+import contextlib
+import dataclasses
 import functools
+import os
 import pathlib
 import re
 import shlex
 import shutil
 import subprocess
 import sys
+import tempfile
+import typing
 from enum import Enum
 from typing import IO, Any, Dict, Iterable, List, Optional, Tuple, Union
 
@@ -123,6 +129,8 @@ class GradingFileOutput(BaseModel):
     intermediate: bool = False
     # Whether to track file through its hash (disable for optimization).
     hash: bool = True
+    # Whether to touch the file before the command runs.
+    touch: bool = False
 
     def get_file(self, storage: Storage) -> Optional[IO[bytes]]:
         if self.dest is not None:
@@ -136,6 +144,15 @@ class GradingFileOutput(BaseModel):
         raise ValueError('No file to get')
 
 
+class GradingFifo(BaseModel):
+    # Destination path relative to the sandbox.
+    path: pathlib.Path
+    # Symlink to the FIFO outside the sandbox.
+    symlink: Optional[pathlib.Path] = None
+    # Whether to create the FIFO if it does not exist.
+    create: bool = True
+
+
 class GradingArtifacts(BaseModel):
     # Root directory for the produced artifacts.
     root: pathlib.Path = pathlib.PosixPath('.')
@@ -143,6 +160,8 @@ class GradingArtifacts(BaseModel):
     inputs: List[GradingFileInput] = []
     # List of output files to copy from the sandbox.
     outputs: List[GradingFileOutput] = []
+    # List of FIFOs
+    fifos: List[GradingFifo] = []
     # Capture certain logs of the execution.
     logs: Optional[GradingLogsHolder] = None
 
@@ -241,6 +260,14 @@ def _process_input_artifacts(artifacts: GradingArtifacts, sandbox: SandboxBase):
             override=True,
             try_symlink=True,
         )
+    for output_artifact in artifacts.outputs:
+        if output_artifact.touch:
+            sandbox.create_file_from_string(
+                output_artifact.src,
+                '',
+                executable=output_artifact.executable,
+                override=True,
+            )
 
 
 def _process_output_artifacts(
@@ -278,6 +305,14 @@ def _process_output_artifacts(
     return True
 
 
+def _process_fifos(artifacts: GradingArtifacts, sandbox: SandboxBase):
+    for fifo in artifacts.fifos:
+        if fifo.symlink is not None:
+            sandbox.create_symlink(fifo.path, fifo.symlink, override=True)
+        else:
+            sandbox.create_fifo(fifo.path, override=True)
+
+
 def testlib_grading_input() -> GradingFileInput:
     return GradingFileInput(src=get_testlib(), dest=pathlib.Path('testlib.h'))
 
@@ -553,7 +588,7 @@ def compile(
     return _process_output_artifacts(artifacts, sandbox)
 
 
-def run(
+async def run(
     command: str,
     params: SandboxParams,
     sandbox: SandboxBase,
@@ -561,10 +596,11 @@ def run(
     metadata: Optional[RunLogMetadata] = None,
 ) -> Optional[RunLog]:
     _process_input_artifacts(artifacts, sandbox)
+    _process_fifos(artifacts, sandbox)
     cmd = _split_and_expand(command, sandbox)
     sandbox.set_params(params)
 
-    if not sandbox.execute_without_std(cmd):
+    if not await asyncio.to_thread(sandbox.execute_without_std, cmd):
        console.print(
            '[error]Sandbox crashed while processing command:[/error]',
            utils.highlight_json_obj(cmd),
@@ -600,6 +636,34 @@ def run(
     return run_log
 
 
+@dataclasses.dataclass
+class CoordinatedRunParams:
+    command: str
+    params: SandboxParams
+    sandbox: SandboxBase
+    artifacts: GradingArtifacts
+    metadata: Optional[RunLogMetadata] = None
+
+
+async def run_coordinated(
+    interactor: CoordinatedRunParams,
+    solution: CoordinatedRunParams,
+) -> Tuple[Optional[RunLog], Optional[RunLog]]:
+    runs = tuple(
+        run(
+            params.command,
+            params.params,
+            params.sandbox,
+            params.artifacts,
+            params.metadata,
+        )
+        for params in [interactor, solution]
+    )
+    return typing.cast(
+        Tuple[Optional[RunLog], Optional[RunLog]], tuple(await asyncio.gather(*runs))
+    )
+
+
 def _normalize_checked_words(s: str) -> Tuple[str, ...]:
     return tuple(s.split())
 
@@ -722,3 +786,13 @@ def evaluate(
         log=log,
         result=checker_result,
     )
+
+
+@contextlib.contextmanager
+def make_fifos():
+    with tempfile.TemporaryDirectory() as temp_dir:
+        fifo_in = pathlib.PosixPath(temp_dir) / 'fifo.in'
+        fifo_out = pathlib.PosixPath(temp_dir) / 'fifo.out'
+        os.mkfifo(fifo_in)
+        os.mkfifo(fifo_out)
+        yield fifo_in, fifo_out
```
rbx/grading/steps_with_caching.py CHANGED

```diff
@@ -1,4 +1,4 @@
-from typing import List, Optional
+from typing import Any, Dict, List, Optional, Tuple
 
 from rbx.grading import steps
 from rbx.grading.caching import DependencyCache, NoCacheException
@@ -11,6 +11,12 @@ from rbx.grading.steps import (
 )
 
 
+def _get_prefixed_cacheable_params(
+    params: Dict[str, Any], prefix: str
+) -> Dict[str, Any]:
+    return {f'{prefix}.{k}': v for k, v in params.items()}
+
+
 def compile(
     commands: List[str],
     params: SandboxParams,
@@ -36,7 +42,7 @@ def compile(
     return ok
 
 
-def run(
+async def run(
     command: str,
     params: SandboxParams,
     sandbox: SandboxBase,
@@ -52,7 +58,7 @@ def run(
 
     with dependency_cache([command], [artifacts], cacheable_params) as is_cached:
         if not is_cached:
-            steps.run(
+            await steps.run(
                command=command,
                params=params,
                artifacts=artifacts,
@@ -61,3 +67,39 @@ def run(
             )
 
     return artifacts.logs.run
+
+
+async def run_coordinated(
+    interactor: steps.CoordinatedRunParams,
+    solution: steps.CoordinatedRunParams,
+    dependency_cache: DependencyCache,
+) -> Tuple[Optional[RunLog], Optional[RunLog]]:
+    interactor.artifacts.logs = GradingLogsHolder()
+    solution.artifacts.logs = GradingLogsHolder()
+
+    cacheable_params = {
+        **_get_prefixed_cacheable_params(
+            interactor.params.get_cacheable_params(), 'interactor'
+        ),
+        **_get_prefixed_cacheable_params(
+            solution.params.get_cacheable_params(), 'solution'
+        ),
+    }
+
+    if interactor.metadata is not None and interactor.metadata.retryIndex is not None:
+        cacheable_params['interactor.__retry_index__'] = interactor.metadata.retryIndex
+    if solution.metadata is not None and solution.metadata.retryIndex is not None:
+        cacheable_params['solution.__retry_index__'] = solution.metadata.retryIndex
+
+    with dependency_cache(
+        [interactor.command, solution.command],
+        [interactor.artifacts, solution.artifacts],
+        cacheable_params,
+    ) as is_cached:
+        if not is_cached:
+            await steps.run_coordinated(interactor, solution)
+
+    return (
+        interactor.artifacts.logs.run,
+        solution.artifacts.logs.run,
+    )
```
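Because a coordinated run caches two executions under a single key, each side's sandbox parameters are namespaced before being merged, so that, say, differing time limits for the interactor and the solution cannot collide in the cache key. The helper is small enough to show in isolation; the parameter values below are made up for illustration:

```python
from typing import Any, Dict


def _get_prefixed_cacheable_params(
    params: Dict[str, Any], prefix: str
) -> Dict[str, Any]:
    return {f'{prefix}.{k}': v for k, v in params.items()}


# Hypothetical parameter dicts, just to show the merged cache-key material.
interactor_params = {'timeout': 10000, 'reverse_io': True}
solution_params = {'timeout': 2000, 'address_space': 262144}

cacheable = {
    **_get_prefixed_cacheable_params(interactor_params, 'interactor'),
    **_get_prefixed_cacheable_params(solution_params, 'solution'),
}
print(cacheable)
# {'interactor.timeout': 10000, 'interactor.reverse_io': True,
#  'solution.timeout': 2000, 'solution.address_space': 262144}
```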