rbx.cp 0.13.4-py3-none-any.whl → 0.13.6-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. rbx/box/checkers.py +2 -9
  2. rbx/box/cli.py +0 -1
  3. rbx/box/code.py +27 -80
  4. rbx/box/environment.py +16 -6
  5. rbx/box/generators.py +26 -3
  6. rbx/box/global_package.py +1 -1
  7. rbx/box/header.py +26 -8
  8. rbx/box/package.py +0 -14
  9. rbx/box/setter_config.py +11 -0
  10. rbx/box/solutions.py +12 -4
  11. rbx/box/tasks.py +9 -4
  12. rbx/box/testing/testing_package.py +69 -2
  13. rbx/box/ui/screens/run_explorer.py +0 -8
  14. rbx/box/ui/utils/run_ui.py +7 -3
  15. rbx/box/ui/widgets/test_output_box.py +1 -1
  16. rbx/box/unit.py +4 -4
  17. rbx/box/validators.py +3 -1
  18. rbx/grading/caching.py +65 -15
  19. rbx/grading/judge/cacher.py +5 -3
  20. rbx/grading/judge/program.py +300 -0
  21. rbx/grading/judge/sandbox.py +30 -200
  22. rbx/grading/judge/sandboxes/stupid_sandbox.py +234 -240
  23. rbx/grading/judge/sandboxes/tee.py +31 -0
  24. rbx/grading/judge/storage.py +7 -1
  25. rbx/grading/steps.py +89 -201
  26. rbx/grading/steps_with_caching.py +15 -6
  27. rbx/resources/presets/default/problem/problem.rbx.yml +0 -2
  28. rbx/resources/templates/rbx.h +43 -2
  29. rbx/testing_utils.py +7 -0
  30. rbx/utils.py +104 -6
  31. {rbx_cp-0.13.4.dist-info → rbx_cp-0.13.6.dist-info}/METADATA +1 -1
  32. {rbx_cp-0.13.4.dist-info → rbx_cp-0.13.6.dist-info}/RECORD +35 -40
  33. rbx/grading/judge/sandboxes/isolate.py +0 -695
  34. rbx/grading/judge/sandboxes/timeit.py +0 -358
  35. rbx/grading/judge/test.py +0 -38
  36. rbx/grading/judge/testiso.py +0 -54
  37. rbx/grading/processing_context.py +0 -71
  38. rbx/resources/envs/isolate.rbx.yml +0 -36
  39. rbx/resources/presets/default/problem/sols/slow.cpp +0 -15
  40. {rbx_cp-0.13.4.dist-info → rbx_cp-0.13.6.dist-info}/LICENSE +0 -0
  41. {rbx_cp-0.13.4.dist-info → rbx_cp-0.13.6.dist-info}/WHEEL +0 -0
  42. {rbx_cp-0.13.4.dist-info → rbx_cp-0.13.6.dist-info}/entry_points.txt +0 -0
rbx/box/ui/screens/run_explorer.py CHANGED
@@ -6,8 +6,6 @@ from textual.reactive import reactive
 from textual.screen import Screen
 from textual.widgets import Footer, Header, Label, ListItem, ListView
 
-from rbx.box import package
-from rbx.box.schema import TaskType
 from rbx.box.solutions import SolutionReportSkeleton
 from rbx.box.ui.screens.error import ErrorScreen
 from rbx.box.ui.screens.run_test_explorer import RunTestExplorerScreen
@@ -42,12 +40,6 @@ class RunExplorerScreen(Screen):
         tips.markup = True
         tips.display = False
         tips.border_title = 'Tips'
-        pkg = package.find_problem_package_or_die()
-        if pkg.type == TaskType.COMMUNICATION:
-            tips.display = True
-            tips.write(
-                'This is an interactive problem.\nYou can use the [bold blue]rbx --capture run[/bold blue] command to capture the interaction between the processes and see them here.'
-            )
         yield tips
 
     def on_mount(self):
rbx/box/ui/utils/run_ui.py CHANGED
@@ -92,7 +92,7 @@ def get_run_testcase_metadata_markup(
     )
     lines.append(f'[b]Time:[/b] {time_str} / [b]Memory:[/b] {memory_str}')
     if checker_msg is not None:
-        lines.append(f'[b]Checker:[/b] {checker_msg}')
+        lines.append(f'[b]Checker:[/b] {utils.escape_markup(checker_msg)}')
     return '\n'.join(lines)
 
 
@@ -102,7 +102,11 @@ def get_metadata_markup(entry: GenerationTestcaseEntry) -> str:
     if entry.metadata.copied_from is not None:
         lines.append(f'[b]Copied from:[/b] {entry.metadata.copied_from.inputPath}')
     if entry.metadata.generator_call is not None:
-        lines.append(f'[b]Gen. call:[/b] {entry.metadata.generator_call}')
+        lines.append(
+            f'[b]Gen. call:[/b] {utils.escape_markup(str(entry.metadata.generator_call))}'
+        )
     if entry.metadata.generator_script is not None:
-        lines.append(f'[b]Gen. script:[/b] {entry.metadata.generator_script}')
+        lines.append(
+            f'[b]Gen. script:[/b] {utils.escape_markup(str(entry.metadata.generator_script))}'
+        )
     return '\n'.join(lines)
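Both run_ui.py hunks fix the same class of bug: checker messages and generator invocations can contain square brackets, which Rich/Textual markup would parse as tags and either swallow or reject. A minimal sketch of the failure mode and the fix, assuming rbx.utils.escape_markup behaves like rich.markup.escape (this diff only shows the call sites, not the helper itself):

import rich.markup
from rich.console import Console

console = Console()
checker_msg = "wrong answer: expected [ok], got [fail]"

# Unescaped, Rich parses '[ok]' and '[fail]' as markup tags, so the
# brackets vanish from the rendered panel or a markup error is raised:
# console.print(f'[b]Checker:[/b] {checker_msg}')

# Escaped, the brackets render literally while the real [b] tag still works.
console.print(f'[b]Checker:[/b] {rich.markup.escape(checker_msg)}')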
rbx/box/ui/widgets/test_output_box.py CHANGED
@@ -44,7 +44,7 @@ class TestBoxWidget(Widget, can_focus=False):
         output: FileLog
         stderr: FileLog
         log: FileLog
-        interaction: FileLog
+        interaction: InteractionBox
 
     def logs(self) -> Logs:
         return self.Logs(
rbx/box/unit.py CHANGED
@@ -41,7 +41,7 @@ class CheckerTestEntry(BaseModel):
         return ', '.join(res)
 
 
-def _extract_validator_test_entries(
+def extract_validator_test_entries(
     tests: List[ValidatorTest],
 ) -> List[ValidatorTestEntry]:
     res: List[ValidatorTestEntry] = []
@@ -57,7 +57,7 @@ def _extract_validator_test_entries(
     return sorted(res, key=lambda x: x.input.name)
 
 
-def _extract_checker_test_entries(tests: List[CheckerTest]) -> List[CheckerTestEntry]:
+def extract_checker_test_entries(tests: List[CheckerTest]) -> List[CheckerTestEntry]:
     res: List[CheckerTestEntry] = []
     seen: Set[pathlib.Path] = set()
     for test in tests:
@@ -94,7 +94,7 @@ def _get_validator_for_test(test: ValidatorTestEntry) -> Optional[CodeItem]:
 async def run_validator_unit_tests(progress: StatusProgress):
     pkg = package.find_problem_package_or_die()
 
-    entries = _extract_validator_test_entries(pkg.unitTests.validator)
+    entries = extract_validator_test_entries(pkg.unitTests.validator)
 
     vals: List[CodeItem] = []
     for test in entries:
@@ -158,7 +158,7 @@ async def run_checker_unit_tests(progress: StatusProgress):
 
     console.console.rule('Checker tests', style='info')
 
-    entries = _extract_checker_test_entries(pkg.unitTests.checker)
+    entries = extract_checker_test_entries(pkg.unitTests.checker)
     if not entries:
         console.console.print('No checker unit tests found.')
         return
rbx/box/validators.py CHANGED
@@ -119,6 +119,7 @@ async def _validate_testcase(
         extra_args=shlex.join(var_args) if var_args else None,
     )
 
+    message = package.get_digest_as_string(message_digest.value or '')
     if (
         run_log is not None
         and run_log.exitcode != 0
@@ -128,12 +129,13 @@
             f'[error]Validator [item]{validator.path}[/item] failed unexpectedly.[/error]'
         )
         console.console.print(f'[error]Summary:[/error] {run_log.get_summary()}')
+        console.console.print(f'[error]Message:[/error] {message}')
+        console.console.print(f'[error]Testcase:[/error] {testcase}')
         raise typer.Exit(1)
 
     log_overview = ''
     if log_digest.value is not None:
         log_overview = package.get_digest_as_string(log_digest.value or '')
-    message = package.get_digest_as_string(message_digest.value or '')
     return (
         run_log is not None and run_log.exitcode == 0,
         message,
rbx/grading/caching.py CHANGED
@@ -88,15 +88,26 @@ def _check_digests(artifacts_list: List[GradingArtifacts]):
             produced.add(id(output.digest))
 
 
-def _build_digest_list(artifacts_list: List[GradingArtifacts]) -> List[DigestHolder]:
-    digests = []
+def _build_artifact_with_digest_list(
+    artifacts_list: List[GradingArtifacts],
+) -> List[GradingFileOutput]:
+    outputs = []
     for artifacts in artifacts_list:
         for output in artifacts.outputs:
             if output.hash and output.digest is None:
                 output.digest = DigestHolder()
             if output.digest is None:
                 continue
-            digests.append(output.digest)
+            outputs.append(output)
+    return outputs
+
+
+def _build_digest_list(artifacts_list: List[GradingArtifacts]) -> List[DigestHolder]:
+    outputs = _build_artifact_with_digest_list(artifacts_list)
+    digests = []
+    for output in outputs:
+        assert output.digest is not None
+        digests.append(output.digest)
     return digests
 
 
@@ -115,27 +126,44 @@ def _build_fingerprint_list(
     return fingerprints
 
 
-def _maybe_check_integrity(output: GradingFileOutput):
+def _maybe_check_integrity(output: GradingFileOutput, integrity_digest: str):
     if not grading_context.should_check_integrity():
         return
-    if output.dest is None or not output.dest.is_symlink():
+    if not output.hash:
+        return
+    if output.dest is None or not output.dest.is_symlink() or not output.dest.is_file():
+        # Only makes sense if the file EXISTS and IS A SYMLINK pointing to an
+        # EXISTING storage file.
+        # If the storage file ceases to exist, we can simply evict from the cache.
         return
-    if output.digest is None or output.digest.value is None:
+    if output.digest is None:
         return
     with output.dest.open('rb') as f:
-        fingerprint = digest_cooperatively(f)
-    if fingerprint != output.digest.value:
+        output_digest = digest_cooperatively(f)
+    if output_digest != integrity_digest:
         raise ValueError(
             f'Cache was tampered with, file {output.dest} has changed since it was cached.\nPlease run `rbx clean` to reset the cache.'
         )
 
 
-def _build_output_fingerprint_list(artifacts_list: List[GradingArtifacts]) -> List[str]:
+def _check_digest_list_integrity(
+    artifacts_list: List[GradingArtifacts], integrity_digests: List[Optional[str]]
+):
+    outputs = _build_artifact_with_digest_list(artifacts_list)
+    assert len(outputs) == len(integrity_digests)
+    for output, integrity_digest in zip(outputs, integrity_digests):
+        assert output.digest is not None
+        if integrity_digest is None:
+            continue
+        _maybe_check_integrity(output, integrity_digest)
+
+
+def _build_output_fingerprint_list(
+    artifacts_list: List[GradingArtifacts],
+) -> List[str]:
    fingerprints = []
    for artifacts in artifacts_list:
        for output in artifacts.outputs:
-            if output.hash:
-                _maybe_check_integrity(output)
            if output.dest is None or output.intermediate or output.hash:
                continue
            if not output.dest.is_file():
@@ -160,7 +188,10 @@ def _build_cache_fingerprint(
 ) -> CacheFingerprint:
     digests = [digest.value for digest in _build_digest_list(artifacts_list)]
     fingerprints = _build_fingerprint_list(artifacts_list, cacher)
-    output_fingerprints = _build_output_fingerprint_list(artifacts_list)
+    output_fingerprints = _build_output_fingerprint_list(
+        artifacts_list,
+    )
+
     logs = _build_logs_list(artifacts_list)
     return CacheFingerprint(
         digests=digests,
@@ -340,7 +371,7 @@ class DependencyCache:
         self.transient_db = SqliteDict(str(tmp_dir / '.cache_db'), autocommit=True)
         atexit.register(lambda: self.db.close())
         atexit.register(lambda: self.transient_db.close())
-        atexit.register(lambda: shutil.rmtree(tmp_dir))
+        atexit.register(lambda: shutil.rmtree(tmp_dir, ignore_errors=True))
 
     def _cache_name(self) -> str:
         return str(self.root / '.cache_db')
@@ -389,7 +420,10 @@
         if fingerprint is None:
             return False
 
-        reference_fingerprint = _build_cache_fingerprint(artifact_list, self.cacher)
+        reference_fingerprint = _build_cache_fingerprint(
+            artifact_list,
+            self.cacher,
+        )
 
         if not _fingerprints_match(fingerprint, reference_fingerprint):
             self._evict_from_cache(key)
@@ -399,6 +433,11 @@
             self._evict_from_cache(key)
             return False
 
+        # Check whether existing storage files were not tampered with.
+        _check_digest_list_integrity(
+            artifact_list,
+            fingerprint.digests,
+        )
         reference_digests = _build_digest_list(artifact_list)
 
         # Apply digest changes.
@@ -422,6 +461,10 @@
         for logs, reference_logs in zip(fingerprint.logs, reference_fingerprint.logs):
             if logs.run is not None:
                 reference_logs.run = logs.run.model_copy(deep=True)
+            if logs.interactor_run is not None:
+                reference_logs.interactor_run = logs.interactor_run.model_copy(
+                    deep=True
+                )
             if logs.preprocess is not None:
                 reference_logs.preprocess = [
                     log.model_copy(deep=True) for log in logs.preprocess
@@ -448,4 +491,11 @@
         if not are_artifacts_ok(artifact_list, self.cacher):
             return
 
-        self._store_in_cache(key, _build_cache_fingerprint(artifact_list, self.cacher))
+        reference_fingerprint = _build_cache_fingerprint(
+            artifact_list,
+            self.cacher,
+        )
+        self._store_in_cache(
+            key,
+            reference_fingerprint,
+        )
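The thrust of the caching.py changes: instead of validating a cached file against its own, possibly mutated, in-memory DigestHolder, a cache hit is now verified against the digests recorded in the stored fingerprint, and only for outputs that exist as symlinks into storage. A self-contained sketch of the idea, with hypothetical helper names and SHA-256 standing in for whatever digest_cooperatively actually computes:

import hashlib
import pathlib

def file_digest(path: pathlib.Path) -> str:
    # Stream the file in chunks so large outputs are not loaded into memory.
    h = hashlib.sha256()
    with path.open('rb') as f:
        for chunk in iter(lambda: f.read(1 << 16), b''):
            h.update(chunk)
    return h.hexdigest()

def check_integrity(path: pathlib.Path, recorded_digest: str) -> None:
    # Mirrors the diff's guard: only a symlink pointing at an existing
    # storage file is worth checking; anything else is handled by eviction.
    if not path.is_symlink() or not path.is_file():
        return
    if file_digest(path) != recorded_digest:
        raise ValueError(f'{path} changed since it was cached')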
rbx/grading/judge/cacher.py CHANGED
@@ -73,7 +73,9 @@ class FileCacher:
            self.file_dir = pathlib.Path(tempfile.mkdtemp())
            # Delete this directory on exit since it has a random name and
            # won't be used again.
-            atexit.register(lambda: shutil.rmtree(str(self.file_dir)))
+            atexit.register(
+                lambda: shutil.rmtree(str(self.file_dir), ignore_errors=True)
+            )
        else:
            assert folder is not None
            self.file_dir = folder / 'fs-cache-shared'
@@ -84,7 +86,7 @@
         self.temp_dir = pathlib.Path(
             tempfile.mkdtemp(dir=self.file_dir, prefix='_temp')
         )
-        atexit.register(lambda: shutil.rmtree(str(self.temp_dir)))
+        atexit.register(lambda: shutil.rmtree(str(self.temp_dir), ignore_errors=True))
         # Just to make sure it was created.
 
     def is_shared(self) -> bool:
@@ -526,7 +528,7 @@
         """
         if self.is_shared():
             raise Exception('You may not destroy a shared cache.')
-        shutil.rmtree(str(self.file_dir))
+        shutil.rmtree(str(self.file_dir), ignore_errors=True)
 
     def list(self) -> List[storage.FileWithMetadata]:
         """List the files available in the storage.
rbx/grading/judge/program.py ADDED
@@ -0,0 +1,300 @@
+import dataclasses
+import os
+import pathlib
+import resource
+import subprocess
+import sys
+import threading
+import typing
+from enum import Enum
+from time import monotonic
+from typing import IO, Any, Dict, List, Optional, Union
+
+import psutil
+
+from rbx.utils import PathOrStr
+
+FileLike = Union[PathOrStr, IO[bytes], int]
+
+
+def _maybe_close_files(files):
+    for fobj in files:
+        if isinstance(fobj, int):
+            continue
+        fobj.close()
+
+
+def _is_pathlike(obj: Any) -> bool:
+    return isinstance(obj, str) or isinstance(obj, pathlib.Path)
+
+
+@dataclasses.dataclass
+class ProgramIO:
+    input: FileLike = subprocess.PIPE
+    output: FileLike = subprocess.PIPE
+    stderr: FileLike = subprocess.PIPE
+
+    def get_file_objects(self):
+        if isinstance(self.input, int):
+            input_fobj = self.input
+        elif _is_pathlike(self.input):
+            input_fobj = pathlib.Path(typing.cast(str, self.input)).open('r')
+        else:
+            input_fobj = typing.cast(IO[bytes], self.input)
+        if isinstance(self.output, int):
+            output_fobj = self.output
+        elif _is_pathlike(self.output):
+            output_path = pathlib.Path(typing.cast(str, self.output))
+            output_path.parent.mkdir(parents=True, exist_ok=True)
+            output_fobj = output_path.open('w')
+        else:
+            output_fobj = typing.cast(IO[bytes], self.output)
+        if isinstance(self.stderr, int):
+            stderr_fobj = self.stderr
+        elif _is_pathlike(self.stderr):
+            stderr_path = pathlib.Path(typing.cast(str, self.stderr))
+            stderr_path.parent.mkdir(parents=True, exist_ok=True)
+            stderr_fobj = stderr_path.open('w')
+        else:
+            stderr_fobj = typing.cast(IO[bytes], self.stderr)
+        return input_fobj, output_fobj, stderr_fobj
+
+
+@dataclasses.dataclass
+class ProgramPipes:
+    input: Optional[IO[bytes]] = None
+    output: Optional[IO[bytes]] = None
+    stderr: Optional[IO[bytes]] = None
+
+
+@dataclasses.dataclass
+class ProgramParams:
+    io: ProgramIO = dataclasses.field(default_factory=ProgramIO)
+    chdir: Optional[pathlib.Path] = None
+    time_limit: Optional[float] = None  # seconds
+    wall_time_limit: Optional[float] = None  # seconds
+    memory_limit: Optional[int] = None  # megabytes
+    fs_limit: Optional[int] = None  # kilobytes
+    env: Dict[str, str] = dataclasses.field(default_factory=dict)
+    pgid: Optional[int] = None
+
+
+def get_preexec_fn(params: ProgramParams):
+    def preexec_fn():
+        os.setpgid(0, params.pgid or 0)
+        if params.time_limit is not None:
+            time_limit_in_ms = int(params.time_limit * 1000)
+            rlimit_cpu = int((time_limit_in_ms + 999) // 1000)
+            resource.setrlimit(resource.RLIMIT_CPU, (rlimit_cpu, rlimit_cpu + 1))
+        if params.fs_limit is not None:
+            fs_limit = params.fs_limit * 1024  # in bytes
+            resource.setrlimit(resource.RLIMIT_FSIZE, (fs_limit + 1, fs_limit * 2))
+
+    return preexec_fn
+
+
+def get_memory_usage(ru: resource.struct_rusage) -> int:
+    """Get memory usage in bytes from resource usage statistics.
+
+    Returns the total memory usage (RSS + shared memory segments) in bytes.
+
+    Platform differences in ru.ru_maxrss:
+    - macOS/Darwin: ru.ru_maxrss is in bytes
+    - Linux: ru.ru_maxrss is in kilobytes
+
+    This function normalizes the result to always return bytes.
+
+    Args:
+        ru: Resource usage statistics from os.wait4() or similar
+
+    Returns:
+        int: Total memory usage in bytes
+    """
+    if sys.platform == 'darwin':
+        # On macOS, ru.ru_maxrss is already in bytes
+        return ru.ru_maxrss + ru.ru_ixrss * 1024
+    # On Linux, ru.ru_maxrss is in kilobytes, so convert to bytes
+    return (ru.ru_maxrss + ru.ru_ixrss + ru.ru_idrss + ru.ru_isrss) * 1024
+
+
+def get_cpu_time(ru: resource.struct_rusage) -> float:
+    """Get CPU time in seconds from resource usage statistics.
+
+    Returns the total CPU time (user + system) in seconds.
+
+    Args:
+        ru: Resource usage statistics from os.wait4() or similar
+
+    Returns:
+        float: Total CPU time in seconds
+    """
+    return ru.ru_utime + ru.ru_stime
+
+
+def get_file_sizes(io: ProgramIO):
+    return _get_file_size(io.output) + _get_file_size(io.stderr)
+
+
+def _get_file_size(filename: Optional[FileLike]) -> int:
+    if filename is None or not _is_pathlike(filename):
+        return 0
+    path = pathlib.Path(typing.cast(str, filename))
+    if not path.is_file():
+        return 0
+    return path.stat().st_size
+
+
+class ProgramCode(Enum):
+    RE = 'RE'
+    SG = 'SG'
+    TO = 'TO'
+    WT = 'WT'
+    ML = 'ML'
+    OL = 'OL'
+    TE = 'TE'
+
+
+@dataclasses.dataclass
+class ProgramResult:
+    exitcode: int
+    wall_time: float
+    cpu_time: float
+    memory_used: int
+    file_sizes: int
+    program_codes: List[ProgramCode]
+    killing_signal: Optional[int] = None
+    alarm_msg: Optional[str] = None
+
+
+class Program:
+    def __init__(self, command: List[str], params: ProgramParams):
+        self.command = command
+        self.params = params
+        self.popen: Optional[subprocess.Popen] = None
+        self._files = []
+
+        self._stop_wall_handler = threading.Event()
+        self._stop_alarm_handler = threading.Event()
+        self._alarm_msg = ''
+
+        self._run()
+
+    @property
+    def pipes(self) -> ProgramPipes:
+        assert self.popen is not None
+        return ProgramPipes(
+            input=self.popen.stdin,
+            output=self.popen.stdout,
+            stderr=self.popen.stderr,
+        )
+
+    @property
+    def pid(self) -> int:
+        assert self.popen is not None
+        return self.popen.pid
+
+    def _kill_process(self):
+        if self.popen is not None:
+            self.popen.kill()
+
+    def _handle_wall(self):
+        if self._stop_wall_handler.wait(self.params.wall_time_limit):
+            return
+        self._stop_alarm_handler.set()
+        self._alarm_msg = 'wall timelimit'
+        self._kill_process()
+
+    def _handle_alarm(self):
+        if self._stop_alarm_handler.wait(0.3):
+            return
+        try:
+            process = psutil.Process(self.pid)
+            if self.params.time_limit is not None:
+                times = process.cpu_times()
+                cpu_time = times.user + times.system
+                if cpu_time > self.params.time_limit:
+                    self._alarm_msg = 'timelimit'
+                    self._kill_process()
+            if self.params.memory_limit is not None:
+                memory_info = process.memory_info()
+                memory_used = memory_info.rss
+                if memory_used > self.params.memory_limit * 1024 * 1024:
+                    self._alarm_msg = 'memorylimit'
+                    self._kill_process()
+            self._stop_alarm_handler.clear()
+            self._handle_alarm()
+        except psutil.NoSuchProcess:
+            return
+
+    def _run(self):
+        self._files = self.params.io.get_file_objects()
+        self.popen = subprocess.Popen(
+            self.command,
+            stdin=self._files[0],
+            stdout=self._files[1],
+            stderr=self._files[2],
+            cwd=self.params.chdir,
+            env={**os.environ, **self.params.env},
+            preexec_fn=get_preexec_fn(self.params),
+            close_fds=True,
+        )
+        self.start_time = monotonic()
+
+        threading.Thread(target=self._handle_wall, daemon=True).start()
+        threading.Thread(target=self._handle_alarm, daemon=True).start()
+
+    def process_exit(self, exitstatus, ru) -> ProgramResult:
+        wall_time = monotonic() - self.start_time
+        cpu_time = get_cpu_time(ru)
+        memory_used = get_memory_usage(ru)
+        file_sizes = get_file_sizes(self.params.io)
+        exitcode = os.waitstatus_to_exitcode(exitstatus)
+        killing_signal = None
+        program_codes = []
+
+        if exitcode < 0:
+            killing_signal = -exitcode
+            program_codes.append(ProgramCode.SG)
+        if exitcode > 0:
+            program_codes.append(ProgramCode.RE)
+        if self.params.time_limit is not None and (
+            cpu_time > self.params.time_limit or -exitcode == 24
+        ):
+            program_codes.append(ProgramCode.TO)
+        if (
+            self.params.wall_time_limit is not None
+            and wall_time > self.params.wall_time_limit
+        ):
+            program_codes.append(ProgramCode.WT)
+            program_codes.append(ProgramCode.TO)
+        # Memory limit checking: Two ways a process can exceed memory limits:
+        # 1. Runtime monitoring (_handle_alarm) kills the process during execution
+        # 2. Post-execution check using ru.ru_maxrss detects peak memory usage exceeded limit
+        # Both memory_used (from ru.ru_maxrss) and memory_limit (converted to bytes) are in bytes
+        if (
+            self.params.memory_limit is not None
+            and memory_used > self.params.memory_limit * 1024 * 1024
+            or self._alarm_msg == 'memorylimit'
+        ):
+            program_codes.append(ProgramCode.ML)
+        if (
+            self.params.fs_limit is not None
+            and file_sizes > self.params.fs_limit * 1024
+        ):
+            program_codes.append(ProgramCode.OL)
+
+        return ProgramResult(
+            exitcode=exitcode,
+            wall_time=wall_time,
+            cpu_time=cpu_time,
+            memory_used=memory_used,
+            file_sizes=file_sizes,
+            program_codes=program_codes,
+            killing_signal=killing_signal,
+            alarm_msg=self._alarm_msg or None,
+        )
+
+    def wait(self):
+        assert self.popen is not None
+        _, exitstatus, ru = os.wait4(self.pid, 0)
+        return self.process_exit(exitstatus, ru)
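program.py is the centerpiece of this release: a portable, psutil-based process runner that absorbs the work previously done by the deleted isolate.py and timeit.py sandboxes (files 33-34 in the list above). It enforces limits three ways: RLIMIT_CPU/RLIMIT_FSIZE set in the child via preexec_fn, a wall-clock watchdog thread, and a polling thread that samples CPU and RSS through psutil. A hypothetical usage sketch, POSIX-only since the class depends on preexec_fn, resource, and os.wait4; the command and limits are invented for illustration:

import pathlib

from rbx.grading.judge.program import Program, ProgramIO, ProgramParams

# Hypothetical solution binary and limits, for illustration only.
params = ProgramParams(
    io=ProgramIO(
        input=pathlib.Path('tests/001.in'),
        output=pathlib.Path('output/001.out'),
        stderr=pathlib.Path('output/001.err'),
    ),
    time_limit=2.0,       # CPU seconds, enforced via RLIMIT_CPU plus polling
    wall_time_limit=8.0,  # watchdog thread kills on wall-clock overrun
    memory_limit=256,     # megabytes, sampled through psutil
    fs_limit=64 * 1024,   # kilobytes of output, via RLIMIT_FSIZE
)

program = Program(['./sol'], params)  # the constructor starts the process
result = program.wait()               # os.wait4 yields exit status + rusage

print(result.program_codes, result.cpu_time, result.memory_used)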