rbx.cp 0.6.1-py3-none-any.whl → 0.8.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. rbx/box/cd.py +32 -4
  2. rbx/box/cli.py +82 -34
  3. rbx/box/code.py +131 -82
  4. rbx/box/contest/main.py +25 -0
  5. rbx/box/creation.py +3 -0
  6. rbx/box/global_package.py +74 -0
  7. rbx/box/linting.py +76 -7
  8. rbx/box/package.py +6 -19
  9. rbx/box/presets/__init__.py +4 -4
  10. rbx/box/remote.py +19 -0
  11. rbx/box/sanitizers/warning_stack.py +3 -3
  12. rbx/box/solutions.py +13 -7
  13. rbx/box/stats.py +10 -0
  14. rbx/box/stresses.py +45 -64
  15. rbx/box/stressing/finder_parser.py +11 -16
  16. rbx/box/tasks.py +33 -22
  17. rbx/box/tooling/boca/scraper.py +1 -1
  18. rbx/grading/caching.py +98 -47
  19. rbx/grading/debug_context.py +31 -0
  20. rbx/grading/grading_context.py +96 -0
  21. rbx/grading/judge/cacher.py +93 -21
  22. rbx/grading/judge/sandbox.py +6 -3
  23. rbx/grading/judge/sandboxes/timeit.py +1 -1
  24. rbx/grading/judge/storage.py +169 -35
  25. rbx/grading/profiling.py +126 -0
  26. rbx/grading/steps.py +44 -16
  27. rbx/grading/steps_with_caching.py +52 -26
  28. rbx/resources/presets/default/contest/.gitignore +2 -0
  29. rbx/resources/presets/default/contest/contest.rbx.yml +18 -4
  30. rbx/resources/presets/default/contest/statement/contest.rbx.tex +25 -86
  31. rbx/resources/presets/default/contest/statement/icpc.sty +322 -0
  32. rbx/resources/presets/default/contest/statement/instructions.tex +40 -0
  33. rbx/resources/presets/default/contest/statement/logo.png +0 -0
  34. rbx/resources/presets/default/contest/statement/template.rbx.tex +45 -36
  35. rbx/resources/presets/default/preset.rbx.yml +8 -6
  36. rbx/resources/presets/default/problem/problem.rbx.yml +20 -17
  37. rbx/resources/presets/default/problem/statement/icpc.sty +322 -0
  38. rbx/resources/presets/default/problem/statement/template.rbx.tex +47 -79
  39. {rbx_cp-0.6.1.dist-info → rbx_cp-0.8.0.dist-info}/METADATA +4 -1
  40. {rbx_cp-0.6.1.dist-info → rbx_cp-0.8.0.dist-info}/RECORD +48 -41
  41. rbx/resources/presets/default/contest/statement/olymp.sty +0 -250
  42. rbx/resources/presets/default/problem/statement/olymp.sty +0 -250
  43. /rbx/resources/presets/default/problem/{gen.cpp → gens/gen.cpp} +0 -0
  44. /rbx/resources/presets/default/problem/{tests → manual_tests}/samples/000.in +0 -0
  45. /rbx/resources/presets/default/problem/{tests → manual_tests}/samples/001.in +0 -0
  46. /rbx/resources/presets/default/problem/{random.py → testplan/random.py} +0 -0
  47. /rbx/resources/presets/default/problem/{random.txt → testplan/random.txt} +0 -0
  48. {rbx_cp-0.6.1.dist-info → rbx_cp-0.8.0.dist-info}/LICENSE +0 -0
  49. {rbx_cp-0.6.1.dist-info → rbx_cp-0.8.0.dist-info}/WHEEL +0 -0
  50. {rbx_cp-0.6.1.dist-info → rbx_cp-0.8.0.dist-info}/entry_points.txt +0 -0
rbx/box/global_package.py ADDED
@@ -0,0 +1,74 @@
+ import functools
+ import pathlib
+ import shutil
+
+ from rbx.config import get_app_path
+ from rbx.grading.caching import DependencyCache
+ from rbx.grading.judge.cacher import FileCacher
+ from rbx.grading.judge.sandbox import SandboxBase
+ from rbx.grading.judge.sandboxes.stupid_sandbox import StupidSandbox
+ from rbx.grading.judge.storage import FilesystemStorage, Storage
+
+ CACHE_STEP_VERSION = 3
+
+
+ def get_cache_fingerprint() -> str:
+     return f'{CACHE_STEP_VERSION}'
+
+
+ @functools.cache
+ def is_cache_valid(cache_dir: pathlib.Path) -> bool:
+     if not cache_dir.is_dir():
+         return True
+     fingerprint_file = cache_dir / 'fingerprint'
+     if not fingerprint_file.is_file():
+         return False
+     fingerprint = fingerprint_file.read_text()
+     if fingerprint.strip() != get_cache_fingerprint():
+         return False
+     return True
+
+
+ @functools.cache
+ def get_global_cache_dir() -> pathlib.Path:
+     cache_dir = get_app_path() / '.box'
+     cache_dir.mkdir(parents=True, exist_ok=True)
+     fingerprint_file = cache_dir / 'fingerprint'
+     if not fingerprint_file.is_file():
+         fingerprint_file.write_text(get_cache_fingerprint())
+     return cache_dir
+
+
+ def is_global_cache_valid() -> bool:
+     return is_cache_valid(get_global_cache_dir())
+
+
+ @functools.cache
+ def get_global_storage_dir() -> pathlib.Path:
+     storage_dir = get_global_cache_dir() / '.storage'
+     storage_dir.mkdir(parents=True, exist_ok=True)
+     return storage_dir
+
+
+ @functools.cache
+ def get_global_cache_storage() -> Storage:
+     return FilesystemStorage(get_global_storage_dir())
+
+
+ @functools.cache
+ def get_global_file_cacher() -> FileCacher:
+     return FileCacher(get_global_cache_storage())
+
+
+ @functools.cache
+ def get_global_dependency_cache() -> DependencyCache:
+     return DependencyCache(get_global_cache_dir(), get_global_file_cacher())
+
+
+ @functools.cache
+ def get_global_sandbox() -> SandboxBase:
+     return StupidSandbox(get_global_file_cacher())
+
+
+ def clear_global_cache():
+     shutil.rmtree(get_global_cache_dir(), ignore_errors=True)
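
The new module above centralizes the cache fingerprint (CACHE_STEP_VERSION) and a set of memoized, process-wide cache objects. A minimal usage sketch based only on the helpers added in this hunk (error handling omitted, call sites illustrative):

    from rbx.box import global_package

    # The fingerprint changes whenever the cache layout version is bumped;
    # is_global_cache_valid() compares it against the stored 'fingerprint' file.
    if not global_package.is_global_cache_valid():
        # Stale layout: drop the .box directory under get_app_path() so it is
        # recreated lazily with the current fingerprint.
        global_package.clear_global_cache()

    cacher = global_package.get_global_file_cacher()
    deps = global_package.get_global_dependency_cache()
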
rbx/box/linting.py CHANGED
@@ -1,26 +1,95 @@
+ import difflib
  import pathlib
+ from typing import Optional, Type

  import yamlfix
  import yamlfix.model
+ from pydantic import BaseModel

  from rbx import console
- from rbx.box.cd import is_contest_package, is_problem_package
+ from rbx.box.cd import is_contest_package, is_preset_package, is_problem_package
+ from rbx.box.contest.schema import Contest
+ from rbx.box.presets import get_preset_yaml
+ from rbx.box.presets.schema import Preset
+ from rbx.box.schema import Package
  from rbx.box.stats import find_problem_packages_from_contest
+ from rbx.utils import uploaded_schema_path


- def fix_yaml(path: pathlib.Path, verbose: bool = True):
-     config = yamlfix.model.YamlfixConfig(quote_basic_values=True)
+ def fix_language_server(path: pathlib.Path, model_cls: Type[BaseModel]) -> bool:
+     stream = []
+     with path.open('r') as f:
+         for line in f:
+             if line.strip().startswith('# yaml-language-server:'):
+                 continue
+             stream.append(line)
+             if line.startswith('---'):
+                 stream.append(
+                     f'# yaml-language-server: $schema={uploaded_schema_path(model_cls)}\n'
+                 )
+     content = ''.join(stream)
+     orig_text = path.read_text()
+     path.write_text(content)
+     return orig_text != content
+
+
+ def fix_yaml(
+     path: pathlib.Path,
+     verbose: bool = True,
+     print_diff: bool = False,
+     model_cls: Optional[Type[BaseModel]] = None,
+ ):
+     orig_text = path.read_text()
+
+     # Config to go hand-to-hand with VSCode YAML extension,
+     # which we offer first class support to. Unfortunately,
+     # YAML extension is not perfect :(
+     config = yamlfix.model.YamlfixConfig(
+         quote_basic_values=True,
+         quote_representation='"',
+         comments_min_spaces_from_content=1,
+     )
      _, changed = yamlfix.fix_files([str(path)], dry_run=False, config=config)
+
+     if model_cls is not None:
+         if fix_language_server(path, model_cls):
+             changed = True
+
      if changed and verbose:
          console.console.print(
              f'Formatting [item]{path}[/item].',
          )

+     if print_diff and changed:
+         unified_diff = difflib.unified_diff(
+             orig_text.splitlines(), path.read_text().splitlines()
+         )
+         console.console.print(
+             f'Diff for [item]{path}[/item].\n' + '\n'.join(unified_diff),
+         )
+
+
+ def fix_package(root: pathlib.Path = pathlib.Path(), print_diff: bool = False):
+     if is_preset_package(root):
+         fix_yaml(root / 'preset.rbx.yml', model_cls=Preset, print_diff=print_diff)
+         preset = get_preset_yaml(root)
+         if preset.problem is not None:
+             fix_yaml(
+                 root / preset.problem / 'problem.rbx.yml',
+                 model_cls=Package,
+                 print_diff=print_diff,
+             )
+         if preset.contest is not None:
+             fix_package(root / preset.contest, print_diff=print_diff)
+         return

- def fix_package(root: pathlib.Path = pathlib.Path()):
      if is_problem_package(root):
-         fix_yaml(root / 'problem.rbx.yml')
+         fix_yaml(root / 'problem.rbx.yml', model_cls=Package, print_diff=print_diff)
      if is_contest_package(root):
-         fix_yaml(root / 'contest.rbx.yml')
+         fix_yaml(root / 'contest.rbx.yml', model_cls=Contest, print_diff=print_diff)
          for problem in find_problem_packages_from_contest(root):
-             fix_yaml(problem / 'problem.rbx.yml')
+             fix_yaml(
+                 problem / 'problem.rbx.yml',
+                 model_cls=Package,
+                 print_diff=print_diff,
+             )
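
Beyond reformatting with yamlfix, fix_yaml can now pin a JSON schema comment for the VSCode YAML extension and print what changed. A small sketch using only the signatures shown above (file paths are illustrative):

    import pathlib

    from rbx.box.linting import fix_package, fix_yaml
    from rbx.box.schema import Package

    # Reformat a single problem.rbx.yml, refresh the
    # '# yaml-language-server: $schema=...' comment placed after the '---'
    # document marker, and print a unified diff of the changes.
    fix_yaml(pathlib.Path('problem.rbx.yml'), model_cls=Package, print_diff=True)

    # Or lint a whole problem/contest/preset package from its root.
    fix_package(pathlib.Path('.'), print_diff=True)
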
rbx/box/package.py CHANGED
@@ -11,8 +11,9 @@ import typer
  from pydantic import ValidationError

  from rbx import console, utils
- from rbx.box import cd
+ from rbx.box import cd, global_package
  from rbx.box.environment import get_sandbox_type
+ from rbx.box.global_package import get_cache_fingerprint
  from rbx.box.schema import (
      CodeItem,
      ExpectedOutcome,
@@ -34,7 +35,6 @@ YAML_NAME = 'problem.rbx.yml'
  _DEFAULT_CHECKER = 'wcmp.cpp'
  _NOOP_CHECKER = 'noop.cpp'
  TEMP_DIR = None
- CACHE_STEP_VERSION = 1


  @functools.cache
@@ -107,17 +107,13 @@ def get_ruyaml(root: pathlib.Path = pathlib.Path()) -> Tuple[ruyaml.YAML, ruyaml
      return res, res.load(problem_yaml_path.read_text())


- def _get_fingerprint() -> str:
-     return f'{CACHE_STEP_VERSION}'
-
-
  @functools.cache
  def get_problem_cache_dir(root: pathlib.Path = pathlib.Path()) -> pathlib.Path:
      cache_dir = find_problem(root) / '.box'
      cache_dir.mkdir(parents=True, exist_ok=True)
      fingerprint_file = cache_dir / 'fingerprint'
      if not fingerprint_file.is_file():
-         fingerprint_file.write_text(_get_fingerprint())
+         fingerprint_file.write_text(get_cache_fingerprint())
      return cache_dir


@@ -170,12 +166,12 @@ def get_problem_preprocessed_path(

  @functools.cache
  def get_cache_storage(root: pathlib.Path = pathlib.Path()) -> Storage:
-     return FilesystemStorage(get_problem_storage_dir(root))
+     return FilesystemStorage(get_problem_storage_dir(root), compress=False)


  @functools.cache
  def get_dependency_cache(root: pathlib.Path = pathlib.Path()) -> DependencyCache:
-     return DependencyCache(get_problem_cache_dir(root), get_cache_storage(root))
+     return DependencyCache(get_problem_cache_dir(root), get_file_cacher(root))


  @functools.cache
@@ -427,16 +423,7 @@ def get_merged_capture_path(root: pathlib.Path = pathlib.Path()) -> pathlib.Path
  @functools.cache
  def is_cache_valid(root: pathlib.Path = pathlib.Path()):
      cache_dir = find_problem(root) / '.box'
-     if not cache_dir.is_dir():
-         return True
-
-     fingerprint_file = cache_dir / 'fingerprint'
-     if not fingerprint_file.is_file():
-         return False
-     fingerprint = fingerprint_file.read_text()
-     if fingerprint.strip() != _get_fingerprint():
-         return False
-     return True
+     return global_package.is_cache_valid(cache_dir)


  def clear_package_cache():
rbx/box/presets/__init__.py CHANGED
@@ -23,7 +23,7 @@ def _find_preset_yaml(root: pathlib.Path = pathlib.Path()) -> Optional[pathlib.P
      return None


- def _get_preset_yaml(root: pathlib.Path = pathlib.Path()) -> Preset:
+ def get_preset_yaml(root: pathlib.Path = pathlib.Path()) -> Preset:
      found = _find_preset_yaml(root)
      if not found:
          console.console.print(
@@ -221,7 +221,7 @@ def _copy_updated_assets(
  def get_active_preset_or_null(root: pathlib.Path = pathlib.Path()) -> Optional[Preset]:
      local_preset = _find_local_preset(root)
      if local_preset is not None:
-         return _get_preset_yaml(local_preset)
+         return get_preset_yaml(local_preset)
      return None


@@ -319,7 +319,7 @@ def _install_preset_from_dir(
      update: bool = False,
      override_uri: Optional[str] = None,
  ):
-     preset = _get_preset_yaml(src)
+     preset = get_preset_yaml(src)

      if ensure_contest and preset.contest is None:
          console.console.print(
@@ -391,7 +391,7 @@ def _install_preset_from_local_dir(
      update: bool = False,
  ):
      pd = pathlib.Path(fetch_info.inner_dir)
-     preset = _get_preset_yaml(pd)
+     preset = get_preset_yaml(pd)
      console.console.print(
          f'Installing local preset [item]{preset.name}[/item] into [item]{dest}[/item]...'
      )
rbx/box/remote.py CHANGED
@@ -27,6 +27,16 @@ class Expander(ABC):
          pass


+ class MainExpander(Expander):
+     def expand(self, path: pathlib.Path) -> Optional[pathlib.Path]:
+         if str(path) != '@main':
+             return None
+         sol = package.get_main_solution()
+         if sol is None:
+             return None
+         return sol.path
+
+
  class BocaExpander(Expander):
      BOCA_REGEX = re.compile(r'\@boca\/(\d+)(?:\-(\d+))?')

@@ -69,6 +79,7 @@ class BocaExpander(Expander):


  REGISTERED_EXPANDERS: List['Expander'] = [
+     MainExpander(),
      BocaExpander(),
  ]

@@ -132,6 +143,9 @@ def _expand_paths(paths: List[pathlib.Path]) -> List[pathlib.Path]:
              continue
          expanded = _expand_path(path)
          if expanded is None:
+             console.console.print(
+                 f'[warning]Remote solution [item]{path}[/item] could not be expanded. Skipping.[/warning]'
+             )
              continue
          res.append(expanded)
      return res
@@ -149,3 +163,8 @@ def expand_file(file: str) -> pathlib.Path:
          )
          raise typer.Exit(1)
      return res[0]
+
+
+ def is_path_remote(path: pathlib.Path) -> bool:
+     remote_dir = package.get_problem_remote_dir()
+     return path.resolve().is_relative_to(remote_dir.resolve())
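
A brief sketch of the new expander and helper above, assuming expand_file resolves registered expanders the same way inside a problem package (the checked path below is illustrative):

    import pathlib

    from rbx.box import remote

    # '@main' is a new shorthand that expands to the package's main solution path.
    main_path = remote.expand_file('@main')

    # is_path_remote() tells whether a path lives under the problem's remote dir.
    remote.is_path_remote(pathlib.Path('some/downloaded/solution.cpp'))
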
rbx/box/sanitizers/warning_stack.py CHANGED
@@ -4,7 +4,7 @@ import shutil

  from rbx import console
  from rbx.box.schema import CodeItem
- from rbx.grading.judge.storage import Storage
+ from rbx.grading.judge.cacher import FileCacher
  from rbx.grading.steps import GradingFileOutput


@@ -18,7 +18,7 @@ class WarningStack:
          self.warnings.add(code.path)

      def add_sanitizer_warning(
-         self, storage: Storage, code: CodeItem, reference: GradingFileOutput
+         self, cacher: FileCacher, code: CodeItem, reference: GradingFileOutput
      ):
          if code.path in self.sanitizer_warnings:
              return
@@ -26,7 +26,7 @@ class WarningStack:
              code.path.with_suffix(code.path.suffix + '.log')
          )
          dest_path.parent.mkdir(parents=True, exist_ok=True)
-         f = reference.get_file(storage)
+         f = reference.get_file(cacher)
          if f is None:
              return
          with dest_path.open('wb') as fout:
rbx/box/solutions.py CHANGED
@@ -13,6 +13,7 @@ import rich.markup
  import rich.table
  import rich.text
  import typer
+ from ordered_set import OrderedSet
  from pydantic import BaseModel

  from rbx import console, utils
@@ -245,7 +246,7 @@ async def convert_list_of_solution_evaluations_to_dict(


  def _get_solutions_for_skeleton(
-     tracked_solutions: Optional[Set[str]] = None,
+     tracked_solutions: Optional[Iterable[str]] = None,
      verification: VerificationLevel = VerificationLevel.NONE,
  ) -> List[Solution]:
      pkg = package.find_problem_package_or_die()
@@ -260,7 +261,7 @@


  def _get_report_skeleton(
-     tracked_solutions: Optional[Set[str]] = None,
+     tracked_solutions: Optional[Iterable[str]] = None,
      verification: VerificationLevel = VerificationLevel.NONE,
      timelimit_override: Optional[int] = None,
  ) -> SolutionReportSkeleton:
@@ -393,7 +394,7 @@ def print_best_output(output_files: List[pathlib.Path], empty_warning: bool = Fa

  def run_solutions(
      progress: Optional[StatusProgress] = None,
-     tracked_solutions: Optional[Set[str]] = None,
+     tracked_solutions: Optional[Iterable[str]] = None,
      verification: VerificationLevel = VerificationLevel.NONE,
      check: bool = True,
      timelimit_override: Optional[int] = None,
@@ -605,7 +606,7 @@


  def _get_interactive_skeleton(
-     tracked_solutions: Optional[Set[str]] = None,
+     tracked_solutions: Optional[Iterable[str]] = None,
      verification: VerificationLevel = VerificationLevel.NONE,
  ) -> SolutionReportSkeleton:
      solutions = _get_solutions_for_skeleton(tracked_solutions, verification)
@@ -645,7 +646,7 @@


  async def run_and_print_interactive_solutions(
-     tracked_solutions: Optional[Set[str]] = None,
+     tracked_solutions: Optional[Iterable[str]] = None,
      verification: VerificationLevel = VerificationLevel.NONE,
      generator: Optional[GeneratorCall] = None,
      testcase_entry: Optional[TestcaseEntry] = None,
@@ -739,7 +740,7 @@ def expand_solutions_with_source(sols: List[str]) -> List[Tuple[Solution, bool]]
      path_sols = remote.expand_files(sols)

      # Ensure sols exist.
-     path_sols = [sol for sol in path_sols if sol.is_file()]
+     for sol in path_sols:
+         if not sol.is_file():
+             console.console.print(
+                 f'[error]Solution [item]{sol}[/item] could not be found.[/error]'
+             )
+             raise typer.Exit(1)

      seen_sols = set()
      res: List[Tuple[Solution, bool]] = []
@@ -762,7 +768,7 @@ def expand_solutions(sols: List[str]) -> List[Solution]:


  async def pick_solutions(
-     tracked_solutions: Optional[Set[str]],
+     tracked_solutions: Optional[OrderedSet[str]],
      extra_solutions: Optional[List[str]] = None,
  ) -> List[str]:
      pkg = package.find_problem_package_or_die()
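
The typing change above relaxes tracked_solutions from Set[str] to any Iterable[str], and pick_solutions now takes an ordered_set.OrderedSet, which de-duplicates while keeping first-seen order so solutions are listed deterministically. An illustrative sketch (the solution names are made up):

    from ordered_set import OrderedSet

    # OrderedSet keeps insertion order while dropping duplicates.
    tracked = OrderedSet(['sols/main.cpp', 'sols/slow.cpp', 'sols/main.cpp'])
    list(tracked)  # ['sols/main.cpp', 'sols/slow.cpp']
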
rbx/box/stats.py CHANGED
@@ -78,6 +78,14 @@ def print_package_stats(root: pathlib.Path = pathlib.Path()) -> int:
      return cache_size + build_size


+ def print_global_stats() -> int:
+     cache_size = get_cache_size()
+     console.console.print(
+         f'[status]Global cache size[/status]: [item]{get_formatted_memory(cache_size)}[/item]'
+     )
+     return cache_size
+
+
  def print_reachable_package_stats(root: pathlib.Path = pathlib.Path()) -> None:
      contest_packages, problem_packages = find_and_group_all_reachable_packages(root)
      total_size = 0
@@ -87,6 +95,8 @@ def print_reachable_package_stats(root: pathlib.Path = pathlib.Path()) -> None:
      for pkg in problem_packages:
          total_size += print_package_stats(pkg)
      console.console.print()
+
+     total_size += print_global_stats()
      console.console.print(
          f'[status]Total size[/status]: [item]{get_formatted_memory(total_size)}[/item]'
      )
rbx/box/stresses.py CHANGED
@@ -1,3 +1,5 @@
+ import pathlib
+ import shutil
  import time
  from shutil import rmtree
  from typing import List, Optional
@@ -8,24 +10,19 @@ import typer
  from pydantic import BaseModel

  from rbx import console
- from rbx.box import checkers, generators, package, validators
- from rbx.box.code import SanitizationLevel, compile_item, run_item
+ from rbx.box import checkers, generators, package, tasks, validators
+ from rbx.box.code import SanitizationLevel, compile_item
  from rbx.box.generators import (
      GenerationMetadata,
      expand_generator_call,
      generate_standalone,
  )
- from rbx.box.retries import Retrier
  from rbx.box.schema import CodeItem, GeneratorCall, Stress, TaskType, Testcase
  from rbx.box.solutions import compile_solutions, get_outcome_style_verdict
  from rbx.box.stressing import finder_parser
  from rbx.grading.steps import (
-     DigestOrDest,
-     DigestOrSource,
      Evaluation,
      Outcome,
-     TestcaseIO,
-     TestcaseLog,
  )
  from rbx.utils import StatusProgress

@@ -61,11 +58,6 @@ async def run_stress(
      sanitized: bool = False,
  ) -> StressReport:
      pkg = package.find_problem_package_or_die()
-     if pkg.type == TaskType.COMMUNICATION:
-         console.console.print(
-             '[error]Communication problems do not support stress testing.[/error]'
-         )
-         raise typer.Exit(1)

      if finder:
          if generator_call is None:
@@ -90,6 +82,8 @@
      call = stress.generator
      generator = package.get_generator(call.name)

+     if progress:
+         progress.update('Compiling generator...')
      try:
          generator_digest = compile_item(generator, sanitized=SanitizationLevel.PREFER)
      except:
@@ -109,11 +103,18 @@
      solutions_digest = compile_solutions(
          tracked_solutions=set(str(solution.path) for solution in solutions),
          sanitized=sanitized,
+         progress=progress,
      )
      if progress:
          progress.update('Compiling finders...')
      finders_digest = {str(finder.path): _compile_finder(finder) for finder in finders}

+     interactor_digest = None
+     if pkg.type == TaskType.COMMUNICATION:
+         interactor_digest = checkers.compile_interactor(progress=progress)
+
+     if progress:
+         progress.update('Compiling validator...')
      compiled_validator = validators.compile_main_validator()

      # Erase old stress directory
@@ -158,47 +159,40 @@
          else None,
      )

-     @async_lru.alru_cache
+     @async_lru.alru_cache(maxsize=None)
      async def run_solution_fn(
          solution: str,
-         retry_index: Optional[int] = None,
+         checker_digest: Optional[str] = None,
          input_path=input_path,
-     ) -> TestcaseLog:
+         output_path: Optional[pathlib.Path] = None,
+     ) -> Evaluation:
          index = solution_indices[solution]
          sol = solutions[index]
-         output_path = input_path.with_stem(f'{index}').with_suffix('.out')
-         stderr_path = output_path.with_suffix('.err')
-
-         run_log = await run_item(
-             sol,
-             DigestOrSource.create(solutions_digest[sol.path]),
-             stdin=DigestOrSource.create(input_path),
-             stdout=DigestOrDest.create(output_path),
-             stderr=DigestOrDest.create(stderr_path),
-             retry_index=retry_index,
-         )
-
-         return TestcaseLog(
-             **(run_log.model_dump() if run_log is not None else {}),
-             stdout_absolute_path=output_path.absolute(),
-             stderr_absolute_path=stderr_path.absolute(),
+         return await tasks.run_solution_on_testcase(
+             solutions[index],
+             compiled_digest=solutions_digest[sol.path],
+             checker_digest=checker_digest,
+             interactor_digest=interactor_digest,
+             testcase=Testcase(inputPath=input_path, outputPath=output_path),
+             output_dir=input_path.parent,
+             filestem=f'{index}',
+             is_stress=True,
          )

      # Get main solution output.
      expected_output_path = empty_path
      if needs_expected_output:
-         main_testcase_log = await run_solution_fn(str(solutions[0].path))
-         main_checker_result = checkers.check_with_no_output(main_testcase_log)
-         if main_checker_result.outcome != Outcome.ACCEPTED:
+         eval = await run_solution_fn(str(solutions[0].path))
+         if eval.result.outcome != Outcome.ACCEPTED:
              console.console.print(
                  '[error]Error while generating main solution output.[/error]'
              )
              console.console.print(f'Input written at [item]{input_path}[/item]')
              console.console.print(
-                 f'Output written at [item]{main_testcase_log.stdout_absolute_path}[/item]'
+                 f'Output written at [item]{eval.log.stdout_absolute_path}[/item]'
              )
              console.console.print(
-                 f'Stderr written at [item]{main_testcase_log.stderr_absolute_path}[/item]'
+                 f'Stderr written at [item]{eval.log.stderr_absolute_path}[/item]'
              )
              console.console.print()
              console.console.print(
@@ -206,44 +200,31 @@
                  "use the two-way modifier in your finder expression (':2')."
              )
              raise typer.Exit(1)
-         expected_output_path = main_testcase_log.stdout_absolute_path
+         if eval.log.stdout_absolute_path is not None:
+             expected_output_path = input_path.with_suffix('.ans')
+             shutil.copyfile(eval.log.stdout_absolute_path, expected_output_path)
+         else:
+             expected_output_path = None

-     @async_lru.alru_cache
+     @async_lru.alru_cache(maxsize=None)
      async def run_solution_and_checker_fn(
          call: finder_parser.FinderCall,
-         input_path=input_path,
          expected_output_path=expected_output_path,
      ) -> finder_parser.FinderResult:
-         async def run_fn(retry_index: int) -> Evaluation:
+         async def run_fn() -> Evaluation:
              solution = call.solution
              checker = call.checker

-             testcase_log = await run_solution_fn(solution, retry_index=retry_index)
-             assert testcase_log.stdout_absolute_path is not None
-
-             if checker is None:
-                 checker_result = checkers.check_with_no_output(testcase_log)
-             else:
-                 checker_digest = finders_digest[checker.path]
-                 checker_result = await checkers.check(
-                     checker_digest,
-                     testcase_log,
-                     Testcase(inputPath=input_path, outputPath=expected_output_path),
-                     program_output=testcase_log.stdout_absolute_path,
-                 )
-
-             return Evaluation(
-                 result=checker_result,
-                 testcase=TestcaseIO(
-                     index=0,
-                     input=input_path,
-                     output=expected_output_path,
-                 ),
-                 log=testcase_log,
+             checker_digest = (
+                 finders_digest[checker.path] if checker is not None else None
+             )
+             return await run_solution_fn(
+                 solution,
+                 checker_digest=checker_digest,
+                 output_path=expected_output_path,
              )

-         retrier = Retrier(is_stress=True)
-         eval = await retrier.repeat(run_fn)
+         eval = await run_fn()

          return finder_parser.FinderResult(
              solution=call.solution,