rbx.cp 0.13.3__py3-none-any.whl → 0.13.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73) hide show
  1. rbx/annotations.py +5 -5
  2. rbx/box/checkers.py +26 -22
  3. rbx/box/cli.py +0 -4
  4. rbx/box/code.py +27 -80
  5. rbx/box/contest/build_contest_statements.py +16 -3
  6. rbx/box/contest/schema.py +1 -2
  7. rbx/box/environment.py +16 -6
  8. rbx/box/fields.py +25 -1
  9. rbx/box/generators.py +31 -5
  10. rbx/box/global_package.py +6 -2
  11. rbx/box/header.py +31 -11
  12. rbx/box/package.py +3 -15
  13. rbx/box/presets/__init__.py +2 -2
  14. rbx/box/schema.py +4 -25
  15. rbx/box/setter_config.py +11 -0
  16. rbx/box/solutions.py +12 -4
  17. rbx/box/statements/build_statements.py +5 -1
  18. rbx/box/statements/builders.py +7 -7
  19. rbx/box/statements/schema.py +11 -2
  20. rbx/box/tasks.py +9 -4
  21. rbx/box/testcase_utils.py +2 -0
  22. rbx/box/testing/__init__.py +0 -0
  23. rbx/box/testing/testing_package.py +246 -0
  24. rbx/box/testing/testing_preset.py +36 -0
  25. rbx/box/testing/testing_shared.py +81 -0
  26. rbx/box/ui/screens/run_explorer.py +0 -8
  27. rbx/box/ui/utils/run_ui.py +7 -3
  28. rbx/box/ui/widgets/test_output_box.py +1 -1
  29. rbx/box/validators.py +5 -2
  30. rbx/grading/caching.py +67 -16
  31. rbx/grading/judge/program.py +268 -0
  32. rbx/grading/judge/sandbox.py +30 -193
  33. rbx/grading/judge/sandboxes/stupid_sandbox.py +232 -241
  34. rbx/grading/judge/sandboxes/tee.py +31 -0
  35. rbx/grading/steps.py +87 -199
  36. rbx/grading/steps_with_caching.py +15 -6
  37. rbx/resources/presets/default/problem/problem.rbx.yml +0 -2
  38. rbx/resources/presets/default/shared/contest_template.rbx.tex +1 -1
  39. rbx/resources/presets/default/shared/problem_template.rbx.tex +5 -1
  40. rbx/resources/templates/rbx.h +43 -2
  41. rbx/testing_utils.py +8 -1
  42. rbx/utils.py +59 -1
  43. {rbx_cp-0.13.3.dist-info → rbx_cp-0.13.5.dist-info}/METADATA +2 -1
  44. {rbx_cp-0.13.3.dist-info → rbx_cp-0.13.5.dist-info}/RECORD +47 -67
  45. rbx/box/conftest.py +0 -42
  46. rbx/box/generators_test.py +0 -67
  47. rbx/box/lazy_importing_test.py +0 -25
  48. rbx/box/solutions_test.py +0 -47
  49. rbx/box/validators_test.py +0 -15
  50. rbx/checker.py +0 -128
  51. rbx/clone.py +0 -197
  52. rbx/conftest.py +0 -38
  53. rbx/create.py +0 -37
  54. rbx/edit.py +0 -24
  55. rbx/grading/conftest.py +0 -33
  56. rbx/grading/judge/sandboxes/isolate.py +0 -695
  57. rbx/grading/judge/testiso.py +0 -54
  58. rbx/grading/steps_with_caching_run_test.py +0 -707
  59. rbx/grading_utils.py +0 -148
  60. rbx/hydration.py +0 -101
  61. rbx/main.py +0 -118
  62. rbx/metadata.py +0 -105
  63. rbx/resources/envs/isolate.rbx.yml +0 -36
  64. rbx/resources/presets/default/problem/sols/slow.cpp +0 -15
  65. rbx/run.py +0 -45
  66. rbx/schema.py +0 -64
  67. rbx/submit.py +0 -61
  68. rbx/test.py +0 -349
  69. rbx/testcase.py +0 -70
  70. rbx/testcase_rendering.py +0 -79
  71. {rbx_cp-0.13.3.dist-info → rbx_cp-0.13.5.dist-info}/LICENSE +0 -0
  72. {rbx_cp-0.13.3.dist-info → rbx_cp-0.13.5.dist-info}/WHEEL +0 -0
  73. {rbx_cp-0.13.3.dist-info → rbx_cp-0.13.5.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,36 @@
1
+ import pathlib
2
+
3
+ from rbx import utils
4
+ from rbx.box.presets.schema import Preset
5
+ from rbx.box.testing.testing_shared import PathOrStr, TestingShared
6
+
7
+
8
class TestingPreset(TestingShared):
    """Test helper that builds a throwaway rbx preset directory.

    Wraps a preset root and provides lazy access to its ``preset.rbx.yml``
    model, plus an ``initialize()`` to bootstrap a minimal valid preset.
    """

    def __init__(self, root: PathOrStr):
        super().__init__(root)
        # Lazily-parsed cache of preset.rbx.yml; populated on first `yml` access.
        self._yml = None

    def initialize(self):
        """Create a minimal preset.rbx.yml (if missing) and copy the env file."""
        if not self.yml_path.exists():
            self.yml_path.parent.mkdir(parents=True, exist_ok=True)
            self.yml_path.touch()
            self.yml_path.write_text(
                utils.model_to_yaml(
                    Preset(uri='rsalesc/test-preset', env=pathlib.Path('env.rbx.yml'))
                )
            )
        self.add_from_resources(
            pathlib.Path('env.rbx.yml'), pathlib.Path('presets/default/env.rbx.yml')
        )

    # Fixed: this was declared as a plain method, but every use site in this
    # class treats it as an attribute (`self.yml_path.exists()`,
    # `self.yml_path.write_text(...)`), which would fail on a bound method.
    # The missing `@property` decorator is restored.
    @property
    def yml_path(self) -> pathlib.Path:
        return self.root / 'preset.rbx.yml'

    @property
    def yml(self) -> Preset:
        """Parsed Preset model, read from disk once and cached."""
        if self._yml is None:
            self._yml = utils.model_from_yaml(Preset, self.yml_path.read_text())
        return self._yml

    def save(self):
        """Serialize the (possibly mutated) cached model back to disk."""
        self.yml_path.write_text(utils.model_to_yaml(self.yml))
@@ -0,0 +1,81 @@
1
+ import os
2
+ import pathlib
3
+ import shutil
4
+ import tempfile
5
+ from typing import Optional, Union
6
+
7
+ from rbx import testing_utils, utils
8
+ from rbx.config import get_resources_file
9
+ from rbx.testing_utils import get_testdata_path
10
+
11
+ PathOrStr = Union[os.PathLike, str]
12
+
13
+
14
class TestingShared:
    """Base helper for tests that operate inside a sandbox directory.

    Wraps a root directory with convenience methods to create/copy files,
    manage temporary directories, and (as a context manager) chdir into the
    root for the duration of a test.
    """

    def __init__(self, root: PathOrStr):
        self.root = pathlib.Path(root)
        # Temp dirs handed out by mkdtemp(); removed by cleanup().
        self._created_tmps: list[pathlib.Path] = []
        self._old_cwd: Optional[pathlib.Path] = None

    def __enter__(self):
        # Enter the sandbox: chdir into root and drop functools caches so
        # package-level cached lookups don't leak state between tests.
        self._old_cwd = pathlib.Path.cwd()
        os.chdir(self.root)
        testing_utils.clear_all_functools_cache()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        if self._old_cwd is not None:
            os.chdir(self._old_cwd)
        self.cleanup()

    def path(self, path: PathOrStr) -> pathlib.Path:
        """Resolve `path` relative to the sandbox root."""
        return self.root / path

    def abspath(self, path: PathOrStr) -> pathlib.Path:
        """Absolute version of path()."""
        return utils.abspath(self.path(path))

    def mkdtemp(self) -> pathlib.Path:
        """Create a temp dir that cleanup() will remove later."""
        temp_dir = pathlib.Path(tempfile.mkdtemp())
        self._created_tmps.append(temp_dir)
        return temp_dir

    def cleanup(self):
        """Remove all temp dirs created through mkdtemp().

        Fixed: now idempotent — removed dirs are ignored and the bookkeeping
        list is cleared, so __exit__ no longer raises FileNotFoundError when
        cleanup() was already called or a test deleted a temp dir itself.
        """
        for tmp in self._created_tmps:
            shutil.rmtree(tmp, ignore_errors=True)
        self._created_tmps.clear()

    def add_file(
        self, path: PathOrStr, src: Optional[PathOrStr] = None
    ) -> pathlib.Path:
        """Create an empty file at `path`, or populate it from testdata `src`."""
        filename = self.path(path)
        filename.parent.mkdir(parents=True, exist_ok=True)
        if src is not None:
            self.add_from_testdata(path, src)
        else:
            filename.touch()
        return filename

    def relpath(self, path: PathOrStr) -> pathlib.Path:
        """Express `path` relative to the sandbox root."""
        return pathlib.Path(path).relative_to(self.root)

    def add_from_testdata(self, path: PathOrStr, src: PathOrStr):
        """Copy a file or whole directory from the repo's testdata into the sandbox."""
        testdata_path = get_testdata_path()
        testdata_file = testdata_path / src
        if testdata_file.is_file():
            shutil.copy(testdata_file, self.path(path))
        elif testdata_file.is_dir():
            shutil.copytree(testdata_file, self.path(path))
        else:
            raise ValueError(f'{testdata_file} is not a file or directory')

    def add_from_resources(self, path: PathOrStr, src: PathOrStr):
        """Copy a packaged resource file into the sandbox."""
        resources_file = get_resources_file(pathlib.Path(src))
        shutil.copy(resources_file, self.path(path))

    def exists_file(self, path: PathOrStr) -> bool:
        return self.path(path).exists()

    def delete_file(self, path: PathOrStr):
        self.path(path).unlink()

    def copy_from(self, other: 'TestingShared'):
        """Mirror another sandbox's tree into this one (symlinks preserved)."""
        shutil.copytree(other.root, self.root, dirs_exist_ok=True, symlinks=True)
@@ -6,8 +6,6 @@ from textual.reactive import reactive
6
6
  from textual.screen import Screen
7
7
  from textual.widgets import Footer, Header, Label, ListItem, ListView
8
8
 
9
- from rbx.box import package
10
- from rbx.box.schema import TaskType
11
9
  from rbx.box.solutions import SolutionReportSkeleton
12
10
  from rbx.box.ui.screens.error import ErrorScreen
13
11
  from rbx.box.ui.screens.run_test_explorer import RunTestExplorerScreen
@@ -42,12 +40,6 @@ class RunExplorerScreen(Screen):
42
40
  tips.markup = True
43
41
  tips.display = False
44
42
  tips.border_title = 'Tips'
45
- pkg = package.find_problem_package_or_die()
46
- if pkg.type == TaskType.COMMUNICATION:
47
- tips.display = True
48
- tips.write(
49
- 'This is an interactive problem.\nYou can use the [bold blue]rbx --capture run[/bold blue] command to capture the interaction between the processes and see them here.'
50
- )
51
43
  yield tips
52
44
 
53
45
  def on_mount(self):
@@ -92,7 +92,7 @@ def get_run_testcase_metadata_markup(
92
92
  )
93
93
  lines.append(f'[b]Time:[/b] {time_str} / [b]Memory:[/b] {memory_str}')
94
94
  if checker_msg is not None:
95
- lines.append(f'[b]Checker:[/b] {checker_msg}')
95
+ lines.append(f'[b]Checker:[/b] {utils.escape_markup(checker_msg)}')
96
96
  return '\n'.join(lines)
97
97
 
98
98
 
@@ -102,7 +102,11 @@ def get_metadata_markup(entry: GenerationTestcaseEntry) -> str:
102
102
  if entry.metadata.copied_from is not None:
103
103
  lines.append(f'[b]Copied from:[/b] {entry.metadata.copied_from.inputPath}')
104
104
  if entry.metadata.generator_call is not None:
105
- lines.append(f'[b]Gen. call:[/b] {entry.metadata.generator_call}')
105
+ lines.append(
106
+ f'[b]Gen. call:[/b] {utils.escape_markup(str(entry.metadata.generator_call))}'
107
+ )
106
108
  if entry.metadata.generator_script is not None:
107
- lines.append(f'[b]Gen. script:[/b] {entry.metadata.generator_script}')
109
+ lines.append(
110
+ f'[b]Gen. script:[/b] {utils.escape_markup(str(entry.metadata.generator_script))}'
111
+ )
108
112
  return '\n'.join(lines)
@@ -44,7 +44,7 @@ class TestBoxWidget(Widget, can_focus=False):
44
44
  output: FileLog
45
45
  stderr: FileLog
46
46
  log: FileLog
47
- interaction: FileLog
47
+ interaction: InteractionBox
48
48
 
49
49
  def logs(self) -> Logs:
50
50
  return self.Logs(
rbx/box/validators.py CHANGED
@@ -8,7 +8,8 @@ from pydantic import BaseModel
8
8
  from rbx import console
9
9
  from rbx.box import package
10
10
  from rbx.box.code import SanitizationLevel, compile_item, run_item
11
- from rbx.box.schema import CodeItem, Primitive
11
+ from rbx.box.fields import Primitive
12
+ from rbx.box.schema import CodeItem
12
13
  from rbx.box.testcase_extractors import (
13
14
  GenerationTestcaseEntry,
14
15
  extract_generation_testcases_from_groups,
@@ -118,6 +119,7 @@ async def _validate_testcase(
118
119
  extra_args=shlex.join(var_args) if var_args else None,
119
120
  )
120
121
 
122
+ message = package.get_digest_as_string(message_digest.value or '')
121
123
  if (
122
124
  run_log is not None
123
125
  and run_log.exitcode != 0
@@ -127,12 +129,13 @@ async def _validate_testcase(
127
129
  f'[error]Validator [item]{validator.path}[/item] failed unexpectedly.[/error]'
128
130
  )
129
131
  console.console.print(f'[error]Summary:[/error] {run_log.get_summary()}')
132
+ console.console.print(f'[error]Message:[/error] {message}')
133
+ console.console.print(f'[error]Testcase:[/error] {testcase}')
130
134
  raise typer.Exit(1)
131
135
 
132
136
  log_overview = ''
133
137
  if log_digest.value is not None:
134
138
  log_overview = package.get_digest_as_string(log_digest.value or '')
135
- message = package.get_digest_as_string(message_digest.value or '')
136
139
  return (
137
140
  run_log is not None and run_log.exitcode == 0,
138
141
  message,
rbx/grading/caching.py CHANGED
@@ -8,6 +8,7 @@ import tempfile
8
8
  from typing import Any, Dict, List, Optional
9
9
 
10
10
  from pydantic import BaseModel
11
+ from sqlitedict import SqliteDict
11
12
 
12
13
  from rbx import console
13
14
  from rbx.grading import grading_context
@@ -87,15 +88,26 @@ def _check_digests(artifacts_list: List[GradingArtifacts]):
87
88
  produced.add(id(output.digest))
88
89
 
89
90
 
90
- def _build_digest_list(artifacts_list: List[GradingArtifacts]) -> List[DigestHolder]:
91
- digests = []
91
+ def _build_artifact_with_digest_list(
92
+ artifacts_list: List[GradingArtifacts],
93
+ ) -> List[GradingFileOutput]:
94
+ outputs = []
92
95
  for artifacts in artifacts_list:
93
96
  for output in artifacts.outputs:
94
97
  if output.hash and output.digest is None:
95
98
  output.digest = DigestHolder()
96
99
  if output.digest is None:
97
100
  continue
98
- digests.append(output.digest)
101
+ outputs.append(output)
102
+ return outputs
103
+
104
+
105
+ def _build_digest_list(artifacts_list: List[GradingArtifacts]) -> List[DigestHolder]:
106
+ outputs = _build_artifact_with_digest_list(artifacts_list)
107
+ digests = []
108
+ for output in outputs:
109
+ assert output.digest is not None
110
+ digests.append(output.digest)
99
111
  return digests
100
112
 
101
113
 
@@ -114,27 +126,44 @@ def _build_fingerprint_list(
114
126
  return fingerprints
115
127
 
116
128
 
117
- def _maybe_check_integrity(output: GradingFileOutput):
129
+ def _maybe_check_integrity(output: GradingFileOutput, integrity_digest: str):
118
130
  if not grading_context.should_check_integrity():
119
131
  return
120
- if output.dest is None or not output.dest.is_symlink():
132
+ if not output.hash:
133
+ return
134
+ if output.dest is None or not output.dest.is_symlink() or not output.dest.is_file():
135
+ # Only makes sense if the file EXISTS and IS A SYMLINK pointing to an
136
+ # EXISTING storage file.
137
+ # If the storage file ceases to exist, we can simply evict from the cache.
121
138
  return
122
- if output.digest is None or output.digest.value is None:
139
+ if output.digest is None:
123
140
  return
124
141
  with output.dest.open('rb') as f:
125
- fingerprint = digest_cooperatively(f)
126
- if fingerprint != output.digest.value:
142
+ output_digest = digest_cooperatively(f)
143
+ if output_digest != integrity_digest:
127
144
  raise ValueError(
128
145
  f'Cache was tampered with, file {output.dest} has changed since it was cached.\nPlease run `rbx clean` to reset the cache.'
129
146
  )
130
147
 
131
148
 
132
- def _build_output_fingerprint_list(artifacts_list: List[GradingArtifacts]) -> List[str]:
149
+ def _check_digest_list_integrity(
150
+ artifacts_list: List[GradingArtifacts], integrity_digests: List[Optional[str]]
151
+ ):
152
+ outputs = _build_artifact_with_digest_list(artifacts_list)
153
+ assert len(outputs) == len(integrity_digests)
154
+ for output, integrity_digest in zip(outputs, integrity_digests):
155
+ assert output.digest is not None
156
+ if integrity_digest is None:
157
+ continue
158
+ _maybe_check_integrity(output, integrity_digest)
159
+
160
+
161
+ def _build_output_fingerprint_list(
162
+ artifacts_list: List[GradingArtifacts],
163
+ ) -> List[str]:
133
164
  fingerprints = []
134
165
  for artifacts in artifacts_list:
135
166
  for output in artifacts.outputs:
136
- if output.hash:
137
- _maybe_check_integrity(output)
138
167
  if output.dest is None or output.intermediate or output.hash:
139
168
  continue
140
169
  if not output.dest.is_file():
@@ -159,7 +188,10 @@ def _build_cache_fingerprint(
159
188
  ) -> CacheFingerprint:
160
189
  digests = [digest.value for digest in _build_digest_list(artifacts_list)]
161
190
  fingerprints = _build_fingerprint_list(artifacts_list, cacher)
162
- output_fingerprints = _build_output_fingerprint_list(artifacts_list)
191
+ output_fingerprints = _build_output_fingerprint_list(
192
+ artifacts_list,
193
+ )
194
+
163
195
  logs = _build_logs_list(artifacts_list)
164
196
  return CacheFingerprint(
165
197
  digests=digests,
@@ -334,9 +366,9 @@ class DependencyCache:
334
366
  def __init__(self, root: pathlib.Path, cacher: FileCacher):
335
367
  self.root = root
336
368
  self.cacher = cacher
337
- self.db = shelve.open(self._cache_name())
369
+ self.db = SqliteDict(self._cache_name(), autocommit=True)
338
370
  tmp_dir = pathlib.Path(tempfile.mkdtemp())
339
- self.transient_db = shelve.open(str(tmp_dir / '.cache_db'))
371
+ self.transient_db = SqliteDict(str(tmp_dir / '.cache_db'), autocommit=True)
340
372
  atexit.register(lambda: self.db.close())
341
373
  atexit.register(lambda: self.transient_db.close())
342
374
  atexit.register(lambda: shutil.rmtree(tmp_dir))
@@ -388,7 +420,10 @@ class DependencyCache:
388
420
  if fingerprint is None:
389
421
  return False
390
422
 
391
- reference_fingerprint = _build_cache_fingerprint(artifact_list, self.cacher)
423
+ reference_fingerprint = _build_cache_fingerprint(
424
+ artifact_list,
425
+ self.cacher,
426
+ )
392
427
 
393
428
  if not _fingerprints_match(fingerprint, reference_fingerprint):
394
429
  self._evict_from_cache(key)
@@ -398,6 +433,11 @@ class DependencyCache:
398
433
  self._evict_from_cache(key)
399
434
  return False
400
435
 
436
+ # Check whether existing storage files were not tampered with.
437
+ _check_digest_list_integrity(
438
+ artifact_list,
439
+ fingerprint.digests,
440
+ )
401
441
  reference_digests = _build_digest_list(artifact_list)
402
442
 
403
443
  # Apply digest changes.
@@ -421,6 +461,10 @@ class DependencyCache:
421
461
  for logs, reference_logs in zip(fingerprint.logs, reference_fingerprint.logs):
422
462
  if logs.run is not None:
423
463
  reference_logs.run = logs.run.model_copy(deep=True)
464
+ if logs.interactor_run is not None:
465
+ reference_logs.interactor_run = logs.interactor_run.model_copy(
466
+ deep=True
467
+ )
424
468
  if logs.preprocess is not None:
425
469
  reference_logs.preprocess = [
426
470
  log.model_copy(deep=True) for log in logs.preprocess
@@ -447,4 +491,11 @@ class DependencyCache:
447
491
  if not are_artifacts_ok(artifact_list, self.cacher):
448
492
  return
449
493
 
450
- self._store_in_cache(key, _build_cache_fingerprint(artifact_list, self.cacher))
494
+ reference_fingerprint = _build_cache_fingerprint(
495
+ artifact_list,
496
+ self.cacher,
497
+ )
498
+ self._store_in_cache(
499
+ key,
500
+ reference_fingerprint,
501
+ )
@@ -0,0 +1,268 @@
1
+ import dataclasses
2
+ import os
3
+ import pathlib
4
+ import resource
5
+ import subprocess
6
+ import sys
7
+ import threading
8
+ import typing
9
+ from enum import Enum
10
+ from time import monotonic
11
+ from typing import IO, Any, Dict, List, Optional, Union
12
+
13
+ import psutil
14
+
15
+ from rbx.utils import PathOrStr
16
+
17
+ FileLike = Union[PathOrStr, IO[bytes], int]
18
+
19
+
20
+ def _maybe_close_files(files):
21
+ for fobj in files:
22
+ if isinstance(fobj, int):
23
+ continue
24
+ fobj.close()
25
+
26
+
27
+ def _is_pathlike(obj: Any) -> bool:
28
+ return isinstance(obj, str) or isinstance(obj, pathlib.Path)
29
+
30
+
31
@dataclasses.dataclass
class ProgramIO:
    """Specification of where a program's stdin/stdout/stderr come from / go to.

    Each field is a raw fd / subprocess sentinel (int), a filesystem path,
    or an already-open file object. Defaults to piping all three streams.
    """

    input: FileLike = subprocess.PIPE
    output: FileLike = subprocess.PIPE
    stderr: FileLike = subprocess.PIPE

    def get_file_objects(self):
        """Materialize the three stream specs into objects usable by Popen.

        Ints (fds and sentinels like PIPE) pass through untouched; paths are
        opened ('r' for input, 'w' for the writable streams, creating parent
        directories first); anything else is assumed to be an open file.
        """

        def materialize(spec, writable):
            if isinstance(spec, int):
                return spec
            if _is_pathlike(spec):
                target = pathlib.Path(typing.cast(str, spec))
                if writable:
                    # Output paths may point into not-yet-created directories.
                    target.parent.mkdir(parents=True, exist_ok=True)
                    return target.open('w')
                return target.open('r')
            return typing.cast(IO[bytes], spec)

        return (
            materialize(self.input, False),
            materialize(self.output, True),
            materialize(self.stderr, True),
        )
61
+
62
+
63
@dataclasses.dataclass
class ProgramPipes:
    """Parent-side handles to a running program's standard streams.

    Fields are populated from the Popen object (see Program.pipes); each is
    None when the corresponding stream was redirected to a file/fd instead
    of a pipe.
    """

    input: Optional[IO[bytes]] = None  # write end of the child's stdin
    output: Optional[IO[bytes]] = None  # read end of the child's stdout
    stderr: Optional[IO[bytes]] = None  # read end of the child's stderr
68
+
69
+
70
@dataclasses.dataclass
class ProgramParams:
    """Execution constraints and environment for a spawned program."""

    # I/O redirection spec for stdin/stdout/stderr.
    io: ProgramIO = dataclasses.field(default_factory=ProgramIO)
    # Working directory for the child; None keeps the parent's cwd.
    chdir: Optional[pathlib.Path] = None
    time_limit: Optional[float] = None  # seconds
    wall_time_limit: Optional[float] = None  # seconds
    memory_limit: Optional[int] = None  # megabytes
    fs_limit: Optional[int] = None  # kilobytes
    # Extra environment variables, merged over os.environ (see Program._run).
    env: Dict[str, str] = dataclasses.field(default_factory=dict)
    # Process group to place the child in; None/0 makes it its own group leader.
    pgid: Optional[int] = None
80
+
81
+
82
def get_preexec_fn(params: ProgramParams):
    """Build the callable that runs in the child between fork and exec.

    The returned function joins the requested process group and installs
    CPU-time and file-size rlimits derived from `params`.
    """

    def preexec_fn():
        # 0 as the target pgid means "use the child's own pid as group id".
        os.setpgid(0, params.pgid or 0)
        if params.time_limit is not None:
            # rlimits are second-granular: round the CPU limit up.
            millis = int(params.time_limit * 1000)
            soft_seconds = int((millis + 999) // 1000)
            resource.setrlimit(
                resource.RLIMIT_CPU, (soft_seconds, soft_seconds + 1)
            )
        if params.fs_limit is not None:
            limit_bytes = params.fs_limit * 1024  # fs_limit is in KiB
            resource.setrlimit(
                resource.RLIMIT_FSIZE, (limit_bytes + 1, limit_bytes * 2)
            )

    return preexec_fn
94
+
95
+
96
def get_memory_usage(ru: resource.struct_rusage) -> int:
    """Peak memory usage extracted from a rusage struct.

    macOS reports ru_maxrss in bytes (hence the //1024); other platforms
    report kilobytes, and the shared/unshared segment fields are summed in.
    """
    if sys.platform == 'darwin':
        return ru.ru_maxrss // 1024 + ru.ru_ixrss
    segments = (ru.ru_maxrss, ru.ru_ixrss, ru.ru_idrss, ru.ru_isrss)
    return sum(segments)
100
+
101
+
102
def get_cpu_time(ru: resource.struct_rusage) -> float:
    """Total CPU time (user + system) from a rusage struct, in seconds."""
    user_seconds = ru.ru_utime
    system_seconds = ru.ru_stime
    return user_seconds + system_seconds
104
+
105
+
106
def get_file_sizes(io: ProgramIO):
    """Combined on-disk size (bytes) of the output and stderr targets."""
    streams = (io.output, io.stderr)
    return sum(_get_file_size(stream) for stream in streams)
108
+
109
+
110
def _get_file_size(filename: Optional[FileLike]) -> int:
    """Size in bytes of `filename` when it is a path to an existing regular file.

    Returns 0 for None, non-path specs (fds, open file objects), and paths
    that don't point at a regular file.
    """
    if filename is None or not _is_pathlike(filename):
        return 0
    candidate = pathlib.Path(typing.cast(str, filename))
    return candidate.stat().st_size if candidate.is_file() else 0
117
+
118
+
119
class ProgramCode(Enum):
    """Outcome flags attached to a finished run (set in Program.process_exit)."""

    RE = 'RE'  # runtime error: positive exit code
    SG = 'SG'  # killed by a signal: negative exit code
    TO = 'TO'  # CPU time limit exceeded (also appended alongside WT)
    WT = 'WT'  # wall time limit exceeded
    ML = 'ML'  # memory limit exceeded
    OL = 'OL'  # output (file size) limit exceeded
    TE = 'TE'  # NOTE(review): never set in this module — confirm meaning with callers
128
+
129
@dataclasses.dataclass
class ProgramResult:
    """Final measurements of a finished run (built by Program.process_exit)."""

    exitcode: int  # negative means killed by signal -exitcode
    wall_time: float  # seconds, measured from Popen start to exit
    cpu_time: float  # seconds (user + system, from rusage)
    memory_used: int  # as reported by get_memory_usage (platform-dependent units)
    file_sizes: int  # combined size of output + stderr files, in bytes
    program_codes: List[ProgramCode]  # outcome flags; empty on a clean run
    killing_signal: Optional[int] = None  # set when a signal terminated the program
    alarm_msg: Optional[str] = None  # 'timelimit'/'memorylimit'/'wall timelimit' if a watchdog fired
139
+
140
+
141
class Program:
    """Spawns and supervises a subprocess under the limits in `params`.

    The process starts immediately on construction. Two daemon threads watch
    it: one enforces the wall-time limit, the other polls CPU time and RSS
    every 300ms via psutil and kills the process when a limit is exceeded.
    Call wait() to block until exit and collect a ProgramResult.
    """

    def __init__(self, command: List[str], params: ProgramParams):
        self.command = command
        self.params = params
        self.popen: Optional[subprocess.Popen] = None
        self._files = []

        # Events used to stop the watchdog threads once the run is over.
        self._stop_wall_handler = threading.Event()
        self._stop_alarm_handler = threading.Event()
        self._alarm_msg = ''

        self._run()

    @property
    def pipes(self) -> ProgramPipes:
        """Parent-side pipe endpoints (None for streams not set to PIPE)."""
        assert self.popen is not None
        return ProgramPipes(
            input=self.popen.stdin,
            output=self.popen.stdout,
            stderr=self.popen.stderr,
        )

    @property
    def pid(self) -> int:
        assert self.popen is not None
        return self.popen.pid

    def _kill_process(self):
        if self.popen is not None:
            self.popen.kill()

    def _handle_wall(self):
        """Wall-clock watchdog: kill the process once the wall limit elapses.

        A wait() that returns True means we were stopped (normal exit path);
        a timeout means the wall limit fired. wall_time_limit=None blocks
        until explicitly stopped.
        """
        if self._stop_wall_handler.wait(self.params.wall_time_limit):
            return
        self._stop_alarm_handler.set()
        self._alarm_msg = 'wall timelimit'
        self._kill_process()

    def _handle_alarm(self):
        """Resource watchdog: poll CPU time and RSS every 300ms.

        Fixed: the original recursed into itself on every polling round,
        overflowing the interpreter's recursion limit (~1000 frames, i.e.
        after roughly 5 minutes of runtime), and cleared the stop event
        before recursing, swallowing stop requests that arrived mid-check.
        This version loops, and honors a stop request on the next wait().
        """
        while not self._stop_alarm_handler.wait(0.3):
            try:
                process = psutil.Process(self.pid)
                if self.params.time_limit is not None:
                    times = process.cpu_times()
                    cpu_time = times.user + times.system
                    if cpu_time > self.params.time_limit:
                        self._alarm_msg = 'timelimit'
                        self._kill_process()
                if self.params.memory_limit is not None:
                    memory_info = process.memory_info()
                    memory_used = memory_info.rss
                    if memory_used > self.params.memory_limit * 1024 * 1024:
                        self._alarm_msg = 'memorylimit'
                        self._kill_process()
            except psutil.NoSuchProcess:
                # Process already exited; nothing left to watch.
                return

    def _run(self):
        """Open the redirection targets and start the subprocess + watchdogs."""
        self._files = self.params.io.get_file_objects()
        self.popen = subprocess.Popen(
            self.command,
            stdin=self._files[0],
            stdout=self._files[1],
            stderr=self._files[2],
            cwd=self.params.chdir,
            env={**os.environ, **self.params.env},
            preexec_fn=get_preexec_fn(self.params),
            close_fds=True,
        )
        self.start_time = monotonic()

        threading.Thread(target=self._handle_wall, daemon=True).start()
        threading.Thread(target=self._handle_alarm, daemon=True).start()

    def process_exit(self, exitstatus, ru) -> ProgramResult:
        """Translate a wait4() status + rusage into a ProgramResult."""
        wall_time = monotonic() - self.start_time
        cpu_time = get_cpu_time(ru)
        memory_used = get_memory_usage(ru)
        file_sizes = get_file_sizes(self.params.io)
        exitcode = os.waitstatus_to_exitcode(exitstatus)
        killing_signal = None
        program_codes = []

        if exitcode < 0:
            # Negative exitcode means the process died from signal -exitcode.
            killing_signal = -exitcode
            program_codes.append(ProgramCode.SG)
        if exitcode > 0:
            program_codes.append(ProgramCode.RE)
        if self.params.time_limit is not None and (
            cpu_time > self.params.time_limit or -exitcode == 24
        ):
            # Signal 24 is SIGXCPU, delivered when the RLIMIT_CPU soft limit
            # installed by get_preexec_fn fires.
            program_codes.append(ProgramCode.TO)
        if (
            self.params.wall_time_limit is not None
            and wall_time > self.params.wall_time_limit
        ):
            program_codes.append(ProgramCode.WT)
            program_codes.append(ProgramCode.TO)
        # NOTE(review): get_memory_usage returns kilobytes on non-macOS while
        # memory_limit*1024*1024 is bytes — this comparison looks off by
        # 1024x; confirm intended units. In practice the psutil watchdog
        # (which compares RSS in bytes) is what sets 'memorylimit'.
        if (
            self.params.memory_limit is not None
            and memory_used > self.params.memory_limit * 1024 * 1024
            or self._alarm_msg == 'memorylimit'
        ):
            program_codes.append(ProgramCode.ML)
        if (
            self.params.fs_limit is not None
            and file_sizes > self.params.fs_limit * 1024
        ):
            program_codes.append(ProgramCode.OL)

        return ProgramResult(
            exitcode=exitcode,
            wall_time=wall_time,
            cpu_time=cpu_time,
            memory_used=memory_used,
            file_sizes=file_sizes,
            program_codes=program_codes,
            killing_signal=killing_signal,
            alarm_msg=self._alarm_msg or None,
        )

    def wait(self):
        """Block until the child exits and return its ProgramResult."""
        assert self.popen is not None
        _, exitstatus, ru = os.wait4(self.pid, 0)
        return self.process_exit(exitstatus, ru)