rbx.cp 0.7.0-py3-none-any.whl → 0.8.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- rbx/box/cli.py +79 -31
- rbx/box/code.py +131 -82
- rbx/box/global_package.py +74 -0
- rbx/box/package.py +6 -19
- rbx/box/remote.py +19 -0
- rbx/box/sanitizers/warning_stack.py +3 -3
- rbx/box/solutions.py +13 -7
- rbx/box/stats.py +10 -0
- rbx/box/stresses.py +45 -64
- rbx/box/stressing/finder_parser.py +11 -16
- rbx/box/tasks.py +33 -22
- rbx/box/tooling/boca/scraper.py +1 -1
- rbx/grading/caching.py +98 -47
- rbx/grading/debug_context.py +31 -0
- rbx/grading/grading_context.py +96 -0
- rbx/grading/judge/cacher.py +93 -21
- rbx/grading/judge/sandbox.py +6 -3
- rbx/grading/judge/sandboxes/timeit.py +1 -1
- rbx/grading/judge/storage.py +169 -35
- rbx/grading/profiling.py +126 -0
- rbx/grading/steps.py +44 -16
- rbx/grading/steps_with_caching.py +52 -26
- rbx/resources/presets/default/contest/.gitignore +2 -0
- rbx/resources/presets/default/contest/contest.rbx.yml +14 -1
- rbx/resources/presets/default/contest/statement/contest.rbx.tex +25 -86
- rbx/resources/presets/default/contest/statement/icpc.sty +322 -0
- rbx/resources/presets/default/contest/statement/instructions.tex +40 -0
- rbx/resources/presets/default/contest/statement/logo.png +0 -0
- rbx/resources/presets/default/contest/statement/template.rbx.tex +45 -36
- rbx/resources/presets/default/preset.rbx.yml +2 -2
- rbx/resources/presets/default/problem/problem.rbx.yml +12 -8
- rbx/resources/presets/default/problem/statement/icpc.sty +322 -0
- rbx/resources/presets/default/problem/statement/template.rbx.tex +47 -79
- {rbx_cp-0.7.0.dist-info → rbx_cp-0.8.0.dist-info}/METADATA +3 -1
- {rbx_cp-0.7.0.dist-info → rbx_cp-0.8.0.dist-info}/RECORD +43 -36
- rbx/resources/presets/default/contest/statement/olymp.sty +0 -250
- rbx/resources/presets/default/problem/statement/olymp.sty +0 -250
- /rbx/resources/presets/default/problem/{gen.cpp → gens/gen.cpp} +0 -0
- /rbx/resources/presets/default/problem/{tests → manual_tests}/samples/000.in +0 -0
- /rbx/resources/presets/default/problem/{tests → manual_tests}/samples/001.in +0 -0
- /rbx/resources/presets/default/problem/{random.py → testplan/random.py} +0 -0
- /rbx/resources/presets/default/problem/{random.txt → testplan/random.txt} +0 -0
- {rbx_cp-0.7.0.dist-info → rbx_cp-0.8.0.dist-info}/LICENSE +0 -0
- {rbx_cp-0.7.0.dist-info → rbx_cp-0.8.0.dist-info}/WHEEL +0 -0
- {rbx_cp-0.7.0.dist-info → rbx_cp-0.8.0.dist-info}/entry_points.txt +0 -0
rbx/grading/judge/storage.py
CHANGED
@@ -3,13 +3,21 @@ import io
 import logging
 import pathlib
 import tempfile
+import typing
 from abc import ABC, abstractmethod
-from typing import IO, AnyStr, List, Optional
+from typing import IO, AnyStr, Dict, List, Optional, Type, TypeVar
+
+import lz4.frame
+from pydantic import BaseModel
+
+from rbx.grading import grading_context
 
 logger = logging.getLogger(__name__)
 
 TOMBSTONE = 'x'
 
+BaseModelT = TypeVar('BaseModelT', bound=BaseModel)
+
 
 def copyfileobj(
     source_fobj: IO[AnyStr],
@@ -43,16 +51,24 @@ def copyfileobj(
         maxlen -= written
 
 
+COMPRESSION_LEVEL = 5
+
+
+class CompressionMetadata(BaseModel):
+    compression_level: int
+
+
 @dataclasses.dataclass
 class PendingFile:
     fd: IO[bytes]
     filename: str
+    metadata: Dict[str, Optional[BaseModel]] = dataclasses.field(default_factory=dict)
 
 
 @dataclasses.dataclass
-class
+class FileWithMetadata:
     filename: str
-
+    metadata: List[str]
 
 
 class Storage(ABC):
@@ -81,13 +97,15 @@ class Storage(ABC):
         pass
 
     @abstractmethod
-    def commit_file(
+    def commit_file(
+        self, file: PendingFile, metadata: Optional[Dict[str, BaseModel]] = None
+    ) -> bool:
         """Commit a file created by create_file() to be stored.
         Given a file object returned by create_file(), this function populates
         the database to record that this file now legitimately exists and can
         be used.
-        fobj (fileobj): the object returned by create_file()
         file (PendingFile): the file to commit.
+        metadata (Dict[str, BaseModel]): the metadata of the file.
         return (bool): True if the file was committed successfully, False if
         there was already a file with the same filename in the database. This
         shouldn't make any difference to the caller, except for testing
@@ -96,19 +114,40 @@ class Storage(ABC):
         pass
 
     @abstractmethod
-    def
-    """
+    def set_metadata(self, filename: str, key: str, value: Optional[BaseModel]):
+        """Set the metadata of a file given its filename.
+        filename (unicode): the filename of the file to set the metadata.
+        key (unicode): the key of the metadata to set.
+        value (BaseModel): the value of the metadata to set.
+        """
         pass
 
     @abstractmethod
-    def
-
-
-
+    def get_metadata(
+        self, filename: str, key: str, model_cls: Type[BaseModel]
+    ) -> Optional[BaseModel]:
+        """Get the metadata of a file given its filename and key.
+        filename (unicode): the filename of the file to get the metadata.
+        key (unicode): the key of the metadata to get.
+        model_cls (Type[BaseModel]): the model class of the metadata.
+        return (BaseModel): the value of the metadata.
         raise (KeyError): if the file cannot be found.
         """
         pass
 
+    @abstractmethod
+    def list_metadata(self, filename: str) -> List[str]:
+        """List the metadata of a file given its filename.
+        filename (unicode): the filename of the file to list the metadata.
+        return (List[str]): the list of metadata keys.
+        """
+        pass
+
+    @abstractmethod
+    def exists(self, filename: str) -> bool:
+        """Check if a file exists in the storage."""
+        pass
+
     @abstractmethod
     def get_size(self, filename: str) -> int:
         """Return the size of a file given its filename.
@@ -127,7 +166,7 @@ class Storage(ABC):
         pass
 
     @abstractmethod
-    def list(self) -> List[
+    def list(self) -> List[FileWithMetadata]:
         """List the files available in the storage.
         return ([(unicode, unicode)]): a list of pairs, each
         representing a file in the form (filename, description).
@@ -138,6 +177,10 @@ class Storage(ABC):
     def path_for_symlink(self, filename: str) -> Optional[pathlib.Path]:
         pass
 
+    @abstractmethod
+    def filename_from_symlink(self, link: pathlib.Path) -> Optional[str]:
+        pass
+
 
 class NullStorage(Storage):
     """This backend is always empty, it just drops each file that
@@ -153,41 +196,54 @@ class NullStorage(Storage):
     def create_file(self, digest: str) -> Optional[PendingFile]:
         return None
 
-    def commit_file(
+    def commit_file(
+        self, file: PendingFile, metadata: Optional[Dict[str, BaseModel]] = None
+    ) -> bool:
         return False
 
-    def
-
+    def set_metadata(self, filename: str, key: str, value: Optional[BaseModel]):
+        pass
 
-    def
+    def get_metadata(
+        self, filename: str, key: str, model_cls: Type[BaseModel]
+    ) -> Optional[BaseModel]:
         raise KeyError('File not found.')
 
+    def list_metadata(self, filename: str) -> List[str]:
+        return []
+
+    def exists(self, filename: str) -> bool:
+        return False
+
     def get_size(self, digest: str) -> int:
         raise KeyError('File not found.')
 
     def delete(self, digest: str):
         pass
 
-    def list(self) -> List[
+    def list(self) -> List[FileWithMetadata]:
        return list()
 
     def path_for_symlink(self, digest: str) -> Optional[pathlib.Path]:
         return None
 
+    def filename_from_symlink(self, link: pathlib.Path) -> Optional[str]:
+        return None
+
 
 class FilesystemStorage(Storage):
     """This class implements a backend for FileCacher that keeps all
     the files in a file system directory, named after their filename.
     """
 
-    def __init__(self, path: pathlib.Path):
+    def __init__(self, path: pathlib.Path, compress: bool = False):
         """Initialize the backend.
         path (string): the base path for the storage.
         """
         self.path = path
-
+        self.compress = compress
         # Create the directory if it doesn't exist
-        path.mkdir(parents=True, exist_ok=True)
+        (path / '.metadata').mkdir(parents=True, exist_ok=True)
 
     def get_file(self, filename: str) -> IO[bytes]:
         """See FileCacherBackend.get_file()."""
@@ -196,6 +252,18 @@ class FilesystemStorage(Storage):
         if not file_path.is_file():
             raise KeyError('File not found.')
 
+        compression_metadata = self.get_metadata(
+            filename, 'compression', CompressionMetadata
+        )
+        if compression_metadata is not None:
+            return typing.cast(
+                IO[bytes],
+                lz4.frame.open(
+                    file_path,
+                    mode='rb',
+                    compression_level=compression_metadata.compression_level,
+                ),
+            )
         return file_path.open('rb')
 
     def create_file(self, filename: str) -> Optional[PendingFile]:
@@ -211,13 +279,39 @@ class FilesystemStorage(Storage):
         temp_file = tempfile.NamedTemporaryFile(
             'wb', delete=False, prefix='.tmp.', suffix=filename, dir=self.path
         )
-
-
-
+        metadata: Dict[str, Optional[BaseModel]] = {'compression': None}
+        if self.compress or grading_context.should_compress():
+            fd_name = temp_file.name
+            level = grading_context.get_compression_level()
+            temp_file = typing.cast(
+                IO[bytes],
+                lz4.frame.open(
+                    temp_file,
+                    mode='wb',
+                    compression_level=level,
+                ),
+            )
+            temp_file.name = fd_name  # type: ignore
+            metadata['compression'] = CompressionMetadata(compression_level=level)
+
+        return PendingFile(fd=temp_file, filename=filename, metadata=metadata)
+
+    def commit_file(
+        self, file: PendingFile, metadata: Optional[Dict[str, BaseModel]] = None
+    ) -> bool:
         """See FileCacherBackend.commit_file()."""
         file.fd.close()
 
         file_path: pathlib.Path = self.path / file.filename
+        file_path.parent.mkdir(parents=True, exist_ok=True)
+
+        for key, value in file.metadata.items():
+            self._set_metadata(file.filename, key, value)
+
+        if metadata is not None:
+            for key, value in metadata.items():
+                self._set_metadata(file.filename, key, value)
+
         # Move it into place in the cache. Skip if it already exists, and
         # delete the temporary file instead.
         if not file_path.is_file():
@@ -231,21 +325,43 @@ class FilesystemStorage(Storage):
             pathlib.PosixPath(file.fd.name).unlink()
             return False
 
+    def _get_metadata_path(self, filename: str, key: str) -> pathlib.Path:
+        return self.path / '.metadata' / f'{filename}__{key}.json'
+
+    def _set_metadata(self, filename: str, key: str, value: Optional[BaseModel]):
+        if value is None:
+            self._get_metadata_path(filename, key).unlink(missing_ok=True)
+        else:
+            metadata_path = self._get_metadata_path(filename, key)
+            metadata_path.parent.mkdir(parents=True, exist_ok=True)
+            metadata_path.write_text(value.model_dump_json())
+
+    def set_metadata(self, filename: str, key: str, value: Optional[BaseModel]):
+        if not self.exists(filename):
+            raise KeyError('File not found.')
+
+        self._set_metadata(filename, key, value)
+
+    def get_metadata(
+        self, filename: str, key: str, model_cls: Type[BaseModelT]
+    ) -> Optional[BaseModelT]:
+        path = self._get_metadata_path(filename, key)
+        if not path.is_file():
+            return None
+        return model_cls.model_validate_json(path.read_text())
+
+    def list_metadata(self, filename: str) -> List[str]:
+        return [
+            path.stem.split('__')[1]
+            for path in (self.path / '.metadata').glob(f'{filename}__*.json')
+        ]
+
     def exists(self, filename: str) -> bool:
         """See FileCacherBackend.exists()."""
         file_path: pathlib.Path = self.path / filename
 
         return file_path.is_file()
 
-    def describe(self, filename: str) -> str:
-        """See FileCacherBackend.describe()."""
-        file_path: pathlib.Path = self.path / filename
-
-        if not file_path.is_file():
-            raise KeyError('File not found.')
-
-        return ''
-
     def get_size(self, filename: str) -> int:
         """See FileCacherBackend.get_size()."""
         file_path: pathlib.Path = self.path / filename
@@ -260,15 +376,19 @@ class FilesystemStorage(Storage):
         file_path: pathlib.Path = self.path / filename
 
         file_path.unlink(missing_ok=True)
+        for key in self.list_metadata(filename):
+            self._get_metadata_path(filename, key).unlink(missing_ok=True)
 
-    def list(self) -> List[
+    def list(self) -> List[FileWithMetadata]:
         """See FileCacherBackend.list()."""
         res = []
         for path in self.path.glob('*'):
             if path.is_file():
+                filename = str(path.relative_to(self.path))
                 res.append(
-
-                    filename=
+                    FileWithMetadata(
+                        filename=filename,
+                        metadata=self.list_metadata(filename),
                     )
                 )
         return res
@@ -277,4 +397,18 @@ class FilesystemStorage(Storage):
         file_path = self.path / filename
         if not file_path.is_file():
             raise KeyError('File not found.')
+
+        compression_metadata = self.get_metadata(
+            filename, 'compression', CompressionMetadata
+        )
+        if compression_metadata is not None:
+            return None
         return file_path
+
+    def filename_from_symlink(self, link: pathlib.Path) -> Optional[str]:
+        if not link.is_symlink():
+            return None
+        filename = link.readlink().resolve()
+        if not filename.is_file():
+            return None
+        return str(filename.relative_to(self.path))
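
This change replaces the old free-form "description" with typed, per-key metadata: each (filename, key) pair is persisted as a JSON side file under `.metadata/`, and a `compression` entry marks files written through an `lz4.frame` stream. A minimal sketch of how the new surface fits together, assuming only the signatures visible in this diff (the storage path and file contents are made up, and `compress=True` assumes `grading_context.get_compression_level()` has a usable default):

# Sketch: exercising the new metadata/compression surface of
# FilesystemStorage, based only on the signatures visible in this diff.
# The storage path and file contents are hypothetical.
import pathlib

from rbx.grading.judge.storage import CompressionMetadata, FilesystemStorage

storage = FilesystemStorage(pathlib.Path('/tmp/rbx-storage'), compress=True)

# create_file() may hand back an lz4-wrapped stream; commit_file() moves the
# temp file into place and writes each metadata entry to
# <path>/.metadata/<filename>__<key>.json.
pending = storage.create_file('example.txt')
if pending is not None:
    pending.fd.write(b'hello world')
    storage.commit_file(pending)

# get_file() consults the 'compression' metadata and transparently wraps the
# returned stream in an lz4 reader when it is present.
with storage.get_file('example.txt') as f:
    assert f.read() == b'hello world'

print(storage.list_metadata('example.txt'))   # e.g. ['compression']
print(storage.get_metadata('example.txt', 'compression', CompressionMetadata))

Note that `path_for_symlink()` now returns None for compressed files, so callers fall back to copying through `get_file()` rather than symlinking to bytes that are not usable as-is.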
rbx/grading/profiling.py
ADDED
@@ -0,0 +1,126 @@
+import contextvars
+import functools
+import math
+import threading
+import time
+
+ALL_CONTEXTS_BY_NAME = {}
+_ALL_CONTEXTS_BY_NAME_LOCK = threading.Lock()
+
+
+@functools.cache
+def _get_threadsafe_context(name: str) -> 'Context':
+    with _ALL_CONTEXTS_BY_NAME_LOCK:
+        if name not in ALL_CONTEXTS_BY_NAME:
+            ALL_CONTEXTS_BY_NAME[name] = Context(name)
+        return ALL_CONTEXTS_BY_NAME[name]
+
+
+class Distribution:
+    def __init__(self):
+        self.values = []
+
+    def add(self, value: float):
+        self.values.append(value)
+
+    def mean(self) -> float:
+        return sum(self.values) / len(self.values)
+
+    def median(self) -> float:
+        return sorted(self.values)[len(self.values) // 2]
+
+    def stddev(self) -> float:
+        mean = self.mean()
+        return math.sqrt(sum((x - mean) ** 2 for x in self.values) / len(self.values))
+
+
+class Context:
+    def __init__(self, name: str):
+        self.name = name
+        self._lock = threading.Lock()
+        self.distributions = {}
+        self.counters = {}
+
+    def add_to_distribution(self, name: str, value: float):
+        with self._lock:
+            if name not in self.distributions:
+                self.distributions[name] = Distribution()
+            self.distributions[name].add(value)
+
+    def add_to_counter(self, name: str):
+        with self._lock:
+            if name not in self.counters:
+                self.counters[name] = 0
+            self.counters[name] += 1
+
+    def print_summary(self):
+        with self._lock:
+            print(f'{self.name}:')
+            for name, distribution in sorted(self.distributions.items()):
+                print(f' ~ {name}: {distribution.mean():.2f}')
+            for name, count in sorted(self.counters.items()):
+                print(f' + {name}: {count}')
+
+
+profiling_stack_var = contextvars.ContextVar(
+    'profiling_stack', default=[_get_threadsafe_context('root')]
+)
+
+
+def _push_profiling_stack(name: str):
+    return profiling_stack_var.set(
+        profiling_stack_var.get() + [_get_threadsafe_context(name)]
+    )
+
+
+class PushContext:
+    def __init__(self, name: str):
+        self.name = name
+        self.token = None
+
+    def __enter__(self):
+        self.token = _push_profiling_stack(self.name)
+        return profiling_stack_var.get()[-1]
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        if self.token is not None:
+            profiling_stack_var.reset(self.token)
+
+
+def print_summary():
+    print('\n' + ('-') * 3 + '\n')
+    with _ALL_CONTEXTS_BY_NAME_LOCK:
+        for context in ALL_CONTEXTS_BY_NAME.values():
+            context.print_summary()
+
+
+### Public API
+
+
+class Profiler:
+    def __init__(self, name: str, start: bool = False):
+        self.name = name
+        self.start_time = 0
+        if start:
+            self.start()
+
+    def start(self):
+        self.start_time = time.monotonic()
+        return self
+
+    def stop(self):
+        self.end_time = time.monotonic()
+        self.duration = self.end_time - self.start_time
+        for context in profiling_stack_var.get():
+            context.add_to_distribution(self.name, self.duration)
+
+    def __enter__(self):
+        return self.start()
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        self.stop()
+
+
+def add_to_counter(name: str):
+    for context in profiling_stack_var.get():
+        context.add_to_counter(name)
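
The new module keeps a stack of named contexts in a contextvar; every `Profiler.stop()` records its duration into each context on the stack, so timings aggregate both globally (under 'root') and per phase. A small usage sketch of the API added above (the phase and counter names are made up):

# Usage sketch for the profiling API added in this file; the phase name
# 'compile', the work items, and the counter name are hypothetical.
from rbx.grading import profiling

with profiling.PushContext('compile'):
    for source in ['a.cpp', 'b.cpp']:
        with profiling.Profiler('compile.single'):
            pass  # ... do the actual work being timed ...
        profiling.add_to_counter('compilations')

# 'compile.single' was recorded in both 'root' and 'compile'; print_summary()
# prints one block per context, with '~' for distributions and '+' for counters.
profiling.print_summary()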
rbx/grading/steps.py
CHANGED
@@ -20,9 +20,10 @@ from rich.text import Text
 from rbx import utils
 from rbx.config import get_bits_stdcpp, get_jngen, get_testlib
 from rbx.console import console
-from rbx.grading import processing_context
+from rbx.grading import grading_context, processing_context
+from rbx.grading.judge.cacher import FileCacher
 from rbx.grading.judge.sandbox import SandboxBase, SandboxParams
-from rbx.grading.judge.storage import
+from rbx.grading.judge.storage import copyfileobj
 from rbx.grading.limits import Limits
 
 MAX_STDOUT_LEN = 1024 * 1024 * 128  # 128 MB
@@ -139,6 +140,8 @@ class GradingFileInput(BaseModel):
     digest: Optional[DigestHolder] = None
     # Whether the destination file should be marked as an executable.
     executable: bool = False
+    # Whether to track file through its hash (disable for optimization).
+    hash: bool = True
 
 
 class GradingFileOutput(BaseModel):
@@ -161,15 +164,15 @@ class GradingFileOutput(BaseModel):
     # Whether to touch the file before the command runs.
     touch: bool = False
 
-    def get_file(self,
+    def get_file(self, cacher: FileCacher) -> Optional[IO[bytes]]:
         if self.dest is not None:
             if self.optional and not self.dest.exists():
                 return None
             return self.dest.open('rb')
         if self.digest is not None and self.digest.value is not None:
-            if self.optional and not
+            if self.optional and not cacher.exists(self.digest.value):
                 return None
-            return
+            return cacher.get_file(self.digest.value)
         raise ValueError('No file to get')
 
 
@@ -308,11 +311,14 @@ def _process_input_artifacts(artifacts: GradingArtifacts, sandbox: SandboxBase):
 
 
 def _process_output_artifacts(
-    artifacts: GradingArtifacts,
+    artifacts: GradingArtifacts,
+    sandbox: SandboxBase,
 ) -> bool:
     for output_artifact in artifacts.outputs:
         if output_artifact.hash and output_artifact.digest is None:
-
+            if not grading_context.is_no_cache():
+                # If cache is enabled, track this file in cache.
+                output_artifact.digest = DigestHolder()
         if not sandbox.file_exists(output_artifact.src):
             if output_artifact.optional:
                 continue
@@ -322,21 +328,43 @@ def _process_output_artifacts(
             return False
 
         if output_artifact.digest is not None:
-
-
-
+            # Put it in the cache, possibly compressing it if it's an executable.
+            with grading_context.compression(
+                use_compression=True,
+                when=output_artifact.executable,
+            ):
+                output_artifact.digest.value = sandbox.get_file_to_storage(
+                    output_artifact.src, trunc_len=output_artifact.maxlen
+                )
        if output_artifact.dest is None:
             continue
         dst: pathlib.Path = artifacts.root / output_artifact.dest
         # Ensure dst directory exists.
+
         dst.parent.mkdir(parents=True, exist_ok=True)
-
-
-
-
-
-
+
+        if (
+            output_artifact.digest is not None
+            and output_artifact.digest.value is not None
+            and (
+                path_to_symlink := sandbox.file_cacher.path_for_symlink(
+                    output_artifact.digest.value
                 )
+            )
+            is not None
+        ):
+            # File is in the persistent cache, store a symlink to it.
+            dst.unlink(missing_ok=True)
+            dst.symlink_to(path_to_symlink)
+        else:
+            # File is not in the persistent cache, copy it.
+            with dst.open('wb') as f:
+                with sandbox.get_file(output_artifact.src) as sb_f:
+                    copyfileobj(
+                        sb_f,
+                        f,
+                        maxlen=output_artifact.maxlen,
+                    )
         if output_artifact.executable:
             dst.chmod(0o755)
     return True
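
The rewritten `_process_output_artifacts()` leans on helpers from the new rbx/grading/grading_context.py (+96 lines, not included in this excerpt). The following is a guess at a contextvars-based shape that would satisfy every call site seen in this diff (`should_compress()`, `get_compression_level()`, `is_no_cache()`, and `compression(use_compression=..., when=...)`); it is a sketch of the contract, not the actual module:

# Sketch: a plausible shape for rbx.grading.grading_context, inferred purely
# from its call sites in this diff. Names match the call sites; the bodies
# and defaults below are assumptions.
import contextlib
import contextvars

_compress_var = contextvars.ContextVar('compress', default=False)
_level_var = contextvars.ContextVar('compression_level', default=5)
_no_cache_var = contextvars.ContextVar('no_cache', default=False)


def should_compress() -> bool:
    return _compress_var.get()


def get_compression_level() -> int:
    return _level_var.get()


def is_no_cache() -> bool:
    return _no_cache_var.get()


@contextlib.contextmanager
def compression(use_compression: bool, when: bool = True):
    # Flip the flag only while the block runs, and only if `when` holds,
    # matching how steps.py compresses executables but not plain outputs.
    token = _compress_var.set(use_compression) if when else None
    try:
        yield
    finally:
        if token is not None:
            _compress_var.reset(token)

Under this reading, the call in steps.py compresses cached outputs only when the artifact is an executable, while FilesystemStorage.create_file() picks the flag up via should_compress() at write time.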