rbx.cp 0.5.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- rbx/__init__.py +0 -0
- rbx/annotations.py +127 -0
- rbx/autoenum.py +333 -0
- rbx/box/__init__.py +0 -0
- rbx/box/builder.py +77 -0
- rbx/box/cd.py +37 -0
- rbx/box/checkers.py +134 -0
- rbx/box/code.py +185 -0
- rbx/box/compile.py +56 -0
- rbx/box/conftest.py +42 -0
- rbx/box/contest/__init__.py +0 -0
- rbx/box/contest/build_contest_statements.py +347 -0
- rbx/box/contest/contest_package.py +76 -0
- rbx/box/contest/contest_utils.py +20 -0
- rbx/box/contest/main.py +179 -0
- rbx/box/contest/schema.py +155 -0
- rbx/box/contest/statements.py +82 -0
- rbx/box/creation.py +72 -0
- rbx/box/download.py +64 -0
- rbx/box/environment.py +345 -0
- rbx/box/extensions.py +26 -0
- rbx/box/generators.py +478 -0
- rbx/box/generators_test.py +63 -0
- rbx/box/main.py +449 -0
- rbx/box/package.py +316 -0
- rbx/box/packaging/boca/extension.py +27 -0
- rbx/box/packaging/boca/packager.py +245 -0
- rbx/box/packaging/contest_main.py +82 -0
- rbx/box/packaging/main.py +68 -0
- rbx/box/packaging/packager.py +117 -0
- rbx/box/packaging/polygon/packager.py +320 -0
- rbx/box/packaging/polygon/test.py +81 -0
- rbx/box/packaging/polygon/xml_schema.py +106 -0
- rbx/box/presets/__init__.py +503 -0
- rbx/box/presets/fetch.py +70 -0
- rbx/box/presets/lock_schema.py +20 -0
- rbx/box/presets/schema.py +59 -0
- rbx/box/schema.py +394 -0
- rbx/box/solutions.py +792 -0
- rbx/box/solutions_test.py +41 -0
- rbx/box/statements/__init__.py +0 -0
- rbx/box/statements/build_statements.py +359 -0
- rbx/box/statements/builders.py +375 -0
- rbx/box/statements/joiners.py +113 -0
- rbx/box/statements/latex.py +47 -0
- rbx/box/statements/latex_jinja.py +214 -0
- rbx/box/statements/schema.py +138 -0
- rbx/box/stresses.py +292 -0
- rbx/box/stressing/__init__.py +0 -0
- rbx/box/stressing/finder_parser.py +359 -0
- rbx/box/stressing/generator_parser.py +258 -0
- rbx/box/testcases.py +54 -0
- rbx/box/ui/__init__.py +0 -0
- rbx/box/ui/captured_log.py +372 -0
- rbx/box/ui/css/app.tcss +48 -0
- rbx/box/ui/main.py +38 -0
- rbx/box/ui/run.py +209 -0
- rbx/box/validators.py +245 -0
- rbx/box/validators_test.py +15 -0
- rbx/checker.py +128 -0
- rbx/clone.py +197 -0
- rbx/config.py +271 -0
- rbx/conftest.py +38 -0
- rbx/console.py +27 -0
- rbx/create.py +37 -0
- rbx/edit.py +24 -0
- rbx/grading/__init__.py +0 -0
- rbx/grading/caching.py +356 -0
- rbx/grading/conftest.py +33 -0
- rbx/grading/judge/__init__.py +0 -0
- rbx/grading/judge/cacher.py +503 -0
- rbx/grading/judge/digester.py +35 -0
- rbx/grading/judge/sandbox.py +748 -0
- rbx/grading/judge/sandboxes/__init__.py +0 -0
- rbx/grading/judge/sandboxes/isolate.py +683 -0
- rbx/grading/judge/sandboxes/stupid_sandbox.py +310 -0
- rbx/grading/judge/sandboxes/timeit.py +217 -0
- rbx/grading/judge/storage.py +284 -0
- rbx/grading/judge/test.py +38 -0
- rbx/grading/judge/testiso.py +54 -0
- rbx/grading/steps.py +522 -0
- rbx/grading/steps_with_caching.py +59 -0
- rbx/grading/steps_with_caching_run_test.py +429 -0
- rbx/grading_utils.py +148 -0
- rbx/hydration.py +101 -0
- rbx/main.py +122 -0
- rbx/metadata.py +105 -0
- rbx/providers/__init__.py +43 -0
- rbx/providers/codeforces.py +73 -0
- rbx/providers/provider.py +26 -0
- rbx/resources/checkers/boilerplate.cpp +20 -0
- rbx/resources/default_config.json +48 -0
- rbx/resources/envs/default.rbx.yml +37 -0
- rbx/resources/envs/isolate.rbx.yml +37 -0
- rbx/resources/packagers/boca/checker.sh +43 -0
- rbx/resources/packagers/boca/compare +53 -0
- rbx/resources/packagers/boca/compile/c +172 -0
- rbx/resources/packagers/boca/compile/cc +173 -0
- rbx/resources/packagers/boca/compile/cpp +172 -0
- rbx/resources/packagers/boca/compile/java +194 -0
- rbx/resources/packagers/boca/compile/kt +155 -0
- rbx/resources/packagers/boca/compile/pas +172 -0
- rbx/resources/packagers/boca/compile/py2 +173 -0
- rbx/resources/packagers/boca/compile/py3 +173 -0
- rbx/resources/packagers/boca/run/c +128 -0
- rbx/resources/packagers/boca/run/cc +128 -0
- rbx/resources/packagers/boca/run/cpp +128 -0
- rbx/resources/packagers/boca/run/java +194 -0
- rbx/resources/packagers/boca/run/kt +159 -0
- rbx/resources/packagers/boca/run/py2 +166 -0
- rbx/resources/packagers/boca/run/py3 +166 -0
- rbx/resources/presets/default/contest/contest.rbx.yml +14 -0
- rbx/resources/presets/default/contest/statement/contest.rbx.tex +97 -0
- rbx/resources/presets/default/contest/statement/olymp.sty +250 -0
- rbx/resources/presets/default/contest/statement/template.rbx.tex +42 -0
- rbx/resources/presets/default/preset.rbx.yml +12 -0
- rbx/resources/presets/default/problem/.gitignore +6 -0
- rbx/resources/presets/default/problem/gen.cpp +9 -0
- rbx/resources/presets/default/problem/problem.rbx.yml +44 -0
- rbx/resources/presets/default/problem/random.py +3 -0
- rbx/resources/presets/default/problem/random.txt +2 -0
- rbx/resources/presets/default/problem/sols/main.cpp +9 -0
- rbx/resources/presets/default/problem/sols/slow.cpp +15 -0
- rbx/resources/presets/default/problem/sols/wa.cpp +9 -0
- rbx/resources/presets/default/problem/statement/olymp.sty +250 -0
- rbx/resources/presets/default/problem/statement/projecao.png +0 -0
- rbx/resources/presets/default/problem/statement/statement.rbx.tex +18 -0
- rbx/resources/presets/default/problem/statement/template.rbx.tex +89 -0
- rbx/resources/presets/default/problem/tests/samples/000.in +1 -0
- rbx/resources/presets/default/problem/tests/samples/001.in +1 -0
- rbx/resources/presets/default/problem/validator.cpp +16 -0
- rbx/resources/presets/default/problem/wcmp.cpp +34 -0
- rbx/resources/templates/template.cpp +19 -0
- rbx/run.py +45 -0
- rbx/schema.py +64 -0
- rbx/submit.py +61 -0
- rbx/submitors/__init__.py +18 -0
- rbx/submitors/codeforces.py +120 -0
- rbx/submitors/submitor.py +25 -0
- rbx/test.py +347 -0
- rbx/testcase.py +70 -0
- rbx/testcase_rendering.py +79 -0
- rbx/testdata/box1/gen1.cpp +7 -0
- rbx/testdata/box1/gen2.cpp +9 -0
- rbx/testdata/box1/genScript.py +2 -0
- rbx/testdata/box1/hard-tle.sol.cpp +26 -0
- rbx/testdata/box1/ole.cpp +17 -0
- rbx/testdata/box1/problem.rbx.yml +39 -0
- rbx/testdata/box1/re.sol.cpp +23 -0
- rbx/testdata/box1/sol.cpp +22 -0
- rbx/testdata/box1/tests/1.in +1 -0
- rbx/testdata/box1/tle-and-incorrect.sol.cpp +33 -0
- rbx/testdata/box1/tle.sol.cpp +35 -0
- rbx/testdata/box1/validator.cpp +11 -0
- rbx/testdata/box1/wa.sol.cpp +22 -0
- rbx/testdata/caching/executable.py +1 -0
- rbx/testdata/compatible +0 -0
- rbx/testing_utils.py +65 -0
- rbx/utils.py +162 -0
- rbx_cp-0.5.0.dist-info/LICENSE +201 -0
- rbx_cp-0.5.0.dist-info/METADATA +89 -0
- rbx_cp-0.5.0.dist-info/RECORD +164 -0
- rbx_cp-0.5.0.dist-info/WHEEL +4 -0
- rbx_cp-0.5.0.dist-info/entry_points.txt +4 -0
rbx/grading/caching.py
ADDED
@@ -0,0 +1,356 @@
import atexit
import io
import os
import pathlib
import shelve
from typing import Any, Dict, List, Optional

from pydantic import BaseModel

from rbx.grading.judge.digester import digest_cooperatively
from rbx.grading.judge.storage import Storage, copyfileobj
from rbx.grading.steps import DigestHolder, GradingArtifacts, GradingLogsHolder


class CacheInput(BaseModel):
    """
    The exact command that was executed, together with
    its set of input and output artifacts.

    This is used as a cache key, which means that, if across
    executions, the command, or the set of artifacts it
    consumes/produces changes, then there will be a cache key
    change, and thus the command will be re-run.
    """

    commands: List[str]
    artifacts: List[GradingArtifacts]
    extra_params: Dict[str, Any] = {}


class CacheFingerprint(BaseModel):
    """
    The state of the artifacts that are not stored in the
    cache key (usually for efficiency/key size reasons), such
    as the hashes of every FS input artifact, or the hashes of
    the produced artifacts.

    This is used for a few things:
    - Check whether the IO files have changed in disk since
      this command was cached, and evict this entry in such case.
    - Check whether the caching storage has changed since this
      command was cached, and evict this entry in such case.
    - Store small side-effects of the cached execution, such as
      execution time, memory, exit codes, etc.
    """

    digests: List[Optional[str]]
    fingerprints: List[str]
    output_fingerprints: List[str]
    logs: List[GradingLogsHolder]


class NoCacheException(Exception):
    pass


def _check_digests(artifacts_list: List[GradingArtifacts]):
    produced = set()
    for artifacts in artifacts_list:
        for input in artifacts.inputs:
            if input.digest is None:
                continue
            if input.digest.value is not None:
                continue
            if id(input.digest) not in produced:
                raise ValueError('Digests must be produced before being consumed')
        for output in artifacts.outputs:
            if output.digest is None:
                continue
            if output.digest.value is not None:
                continue
            if id(output.digest) in produced:
                raise ValueError('A digest cannot be produced more than once')
            produced.add(id(output.digest))


def _build_digest_list(artifacts_list: List[GradingArtifacts]) -> List[DigestHolder]:
    digests = []
    for artifacts in artifacts_list:
        for output in artifacts.outputs:
            if output.hash and output.digest is None:
                output.digest = DigestHolder()
            if output.digest is None:
                continue
            digests.append(output.digest)
    return digests


def _build_fingerprint_list(artifacts_list: List[GradingArtifacts]) -> List[str]:
    fingerprints = []
    for artifacts in artifacts_list:
        for input in artifacts.inputs:
            if input.src is None:
                continue
            with input.src.open('rb') as f:
                fingerprints.append(digest_cooperatively(f))
    return fingerprints


def _build_output_fingerprint_list(artifacts_list: List[GradingArtifacts]) -> List[str]:
    fingerprints = []
    for artifacts in artifacts_list:
        for output in artifacts.outputs:
            if output.dest is None or output.intermediate or output.hash:
                continue
            if not output.dest.is_file():
                fingerprints.append('')  # file does not exist
                continue
            with output.dest.open('rb') as f:
                fingerprints.append(digest_cooperatively(f))
    return fingerprints


def _build_logs_list(artifacts_list: List[GradingArtifacts]) -> List[GradingLogsHolder]:
    logs = []
    for artifacts in artifacts_list:
        if artifacts.logs is not None:
            logs.append(artifacts.logs)
    return logs


def _build_cache_fingerprint(
    artifacts_list: List[GradingArtifacts],
) -> CacheFingerprint:
    digests = [digest.value for digest in _build_digest_list(artifacts_list)]
    fingerprints = _build_fingerprint_list(artifacts_list)
    output_fingerprints = _build_output_fingerprint_list(artifacts_list)
    logs = _build_logs_list(artifacts_list)
    return CacheFingerprint(
        digests=digests,
        fingerprints=fingerprints,
        output_fingerprints=output_fingerprints,
        logs=logs,
    )


def _fingerprints_match(
    fingerprint: CacheFingerprint, reference: CacheFingerprint
) -> bool:
    lhs, rhs = fingerprint.fingerprints, reference.fingerprints
    return tuple(lhs) == tuple(rhs)


def _output_fingerprints_match(
    fingerprint: CacheFingerprint, reference: CacheFingerprint
) -> bool:
    lhs, rhs = fingerprint.output_fingerprints, reference.output_fingerprints
    return tuple(lhs) == tuple(rhs)


def _build_cache_input(
    commands: List[str],
    artifact_list: List[GradingArtifacts],
    extra_params: Dict[str, Any],
) -> CacheInput:
    cloned_artifact_list = [
        artifacts.model_copy(deep=True) for artifacts in artifact_list
    ]
    for artifacts in cloned_artifact_list:
        for output in artifacts.outputs:
            if output.hash:
                # Cleanup dest field from hash artifacts
                # since only their digest value should
                # be tracked by the cache.
                output.dest = None
    return CacheInput(
        commands=commands, artifacts=cloned_artifact_list, extra_params=extra_params
    )


def _build_cache_key(input: CacheInput) -> str:
    with io.BytesIO(input.model_dump_json().encode()) as fobj:
        return digest_cooperatively(fobj)


def _copy_hashed_files(artifact_list: List[GradingArtifacts], storage: Storage):
    for artifact in artifact_list:
        for output in artifact.outputs:
            if not output.hash or output.dest is None:
                continue
            assert output.digest is not None
            if output.optional and output.digest.value is None:
                continue
            assert output.digest.value is not None
            with storage.get_file(output.digest.value) as fobj:
                with output.dest.open('wb') as f:
                    copyfileobj(fobj, f, maxlen=output.maxlen)
            if output.executable:
                output.dest.chmod(0o755)


def is_artifact_ok(artifact: GradingArtifacts, storage: Storage) -> bool:
    for output in artifact.outputs:
        if output.optional or output.intermediate:
            continue
        if output.digest is not None:
            if output.digest.value is None or not storage.exists(output.digest.value):
                return False
            return True
        assert output.dest is not None
        file_path: pathlib.Path = artifact.root / output.dest
        if not file_path.is_file():
            return False
        executable = os.access(str(file_path), os.X_OK)
        if executable != output.executable:
            return False
    return True


def are_artifacts_ok(artifacts: List[GradingArtifacts], storage: Storage) -> bool:
    for artifact in artifacts:
        if not is_artifact_ok(artifact, storage):
            return False
    return True


class DependencyCacheBlock:
    class Break(Exception):
        pass

    def __init__(
        self,
        cache: 'DependencyCache',
        commands: List[str],
        artifact_list: List[GradingArtifacts],
        extra_params: Dict[str, Any],
    ):
        self.cache = cache
        self.commands = commands
        self.artifact_list = artifact_list
        self.extra_params = extra_params
        self._key = None

    def __enter__(self):
        input = _build_cache_input(
            commands=self.commands,
            artifact_list=self.artifact_list,
            extra_params=self.extra_params,
        )
        self._key = _build_cache_key(input)
        found = self.cache.find_in_cache(
            self.commands, self.artifact_list, self.extra_params, key=self._key
        )
        return found

    def __exit__(self, exc_type, exc_val, exc_tb):
        if exc_type is None:
            self.cache.store_in_cache(
                self.commands, self.artifact_list, self.extra_params, key=self._key
            )
        if exc_type is NoCacheException:
            return True
        return None


class DependencyCache:
    root: pathlib.Path
    storage: Storage

    def __init__(self, root: pathlib.Path, storage: Storage):
        self.root = root
        self.storage = storage
        self.db = shelve.open(self._cache_name())
        atexit.register(lambda: self.db.close())

    def _cache_name(self) -> str:
        return str(self.root / '.cache_db')

    def _find_in_cache(self, key: str) -> Optional[CacheFingerprint]:
        return self.db.get(key)

    def _store_in_cache(self, key: str, fingerprint: CacheFingerprint):
        self.db[key] = fingerprint

    def _evict_from_cache(self, key: str):
        if key in self.db:
            del self.db[key]

    def __call__(
        self,
        commands: List[str],
        artifact_list: List[GradingArtifacts],
        extra_params: Optional[Dict[str, Any]] = None,
    ) -> DependencyCacheBlock:
        _check_digests(artifact_list)
        return DependencyCacheBlock(self, commands, artifact_list, extra_params or {})

    def find_in_cache(
        self,
        commands: List[str],
        artifact_list: List[GradingArtifacts],
        extra_params: Dict[str, Any],
        key: Optional[str] = None,
    ) -> bool:
        input = _build_cache_input(
            commands=commands, artifact_list=artifact_list, extra_params=extra_params
        )
        key = key or _build_cache_key(input)

        fingerprint = self._find_in_cache(key)
        if fingerprint is None:
            return False

        reference_fingerprint = _build_cache_fingerprint(artifact_list)

        if not _fingerprints_match(fingerprint, reference_fingerprint):
            self._evict_from_cache(key)
            return False

        if not _output_fingerprints_match(fingerprint, reference_fingerprint):
            self._evict_from_cache(key)
            return False

        reference_digests = _build_digest_list(artifact_list)

        # Apply digest changes.
        old_digest_values = [digest for digest in reference_fingerprint.digests]
        for digest, reference_digest in zip(fingerprint.digests, reference_digests):
            reference_digest.value = digest

        if not are_artifacts_ok(artifact_list, self.storage):
            # Rollback digest changes.
            for old_digest_value, reference_digest in zip(
                old_digest_values, reference_digests
            ):
                reference_digest.value = old_digest_value
            self._evict_from_cache(key)
            return False

        # Copy hashed files to file system.
        _copy_hashed_files(artifact_list, self.storage)

        # Apply logs changes.
        for logs, reference_logs in zip(fingerprint.logs, reference_fingerprint.logs):
            if logs.run is not None:
                reference_logs.run = logs.run.model_copy(deep=True)
                reference_logs.cached = True

        return True

    def store_in_cache(
        self,
        commands: List[str],
        artifact_list: List[GradingArtifacts],
        extra_params: Dict[str, Any],
        key: Optional[str] = None,
    ):
        input = CacheInput(
            commands=commands, artifacts=artifact_list, extra_params=extra_params
        )
        key = key or _build_cache_key(input)

        if not are_artifacts_ok(artifact_list, self.storage):
            return

        self._store_in_cache(key, _build_cache_fingerprint(artifact_list))
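Pieced together from the module above: a DependencyCache instance is callable and returns a DependencyCacheBlock used as a context manager. __enter__ reports whether a matching fingerprint was found (restoring digests, hashed files and logs on a hit), while __exit__ stores a fresh fingerprint unless a NoCacheException escapes the block. The sketch below only illustrates that flow; the '.box' paths, the compile_solution helper, and the assumption that GradingArtifacts can be constructed with its defaults are illustrative, not part of the package's documented API.

import pathlib

from rbx.grading.caching import DependencyCache, NoCacheException
from rbx.grading.judge.storage import FilesystemStorage
from rbx.grading.steps import GradingArtifacts

# Hypothetical layout; the real CLI derives these paths from the problem package.
root = pathlib.Path('.box')
cache = DependencyCache(root, FilesystemStorage(root / '.storage'))

commands = ['g++ -O2 -o sol sol.cpp']
artifacts = GradingArtifacts()  # inputs/outputs would be populated by the caller

with cache(commands, [artifacts]) as is_cached:
    if not is_cached:
        ok = compile_solution(commands, artifacts)  # hypothetical runner
        if not ok:
            # Raising NoCacheException keeps __exit__ from caching a failed run.
            raise NoCacheException()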
rbx/grading/conftest.py
ADDED
@@ -0,0 +1,33 @@
import pathlib
from collections.abc import Iterator

import pytest

from rbx.grading.caching import DependencyCache
from rbx.grading.judge.cacher import FileCacher
from rbx.grading.judge.sandbox import SandboxBase
from rbx.grading.judge.sandboxes.stupid_sandbox import StupidSandbox
from rbx.grading.judge.storage import FilesystemStorage, Storage


@pytest.fixture
def storage(request, cleandir: pathlib.Path) -> Iterator[Storage]:
    storage_path = cleandir / '.box' / '.storage'
    yield FilesystemStorage(storage_path)


@pytest.fixture
def file_cacher(request, storage: Storage) -> Iterator[FileCacher]:
    yield FileCacher(storage)


@pytest.fixture
def sandbox(request, file_cacher: FileCacher) -> Iterator[SandboxBase]:
    yield StupidSandbox(file_cacher=file_cacher)


@pytest.fixture
def dependency_cache(
    request, cleandir: pathlib.Path, storage: Storage
) -> Iterator[DependencyCache]:
    yield DependencyCache(cleandir / '.box', storage)
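These fixtures are meant to be consumed by name in test modules such as rbx/grading/steps_with_caching_run_test.py (listed above). A minimal sketch of how they compose, assuming the cleandir fixture from rbx/conftest.py supplies the temporary working directory and that GradingArtifacts can be constructed with its defaults:

from rbx.grading.caching import DependencyCache
from rbx.grading.steps import GradingArtifacts


def test_repeated_command_hits_cache(dependency_cache: DependencyCache):
    # First pass: no stored fingerprint yet, so the block reports a miss
    # and __exit__ stores one on the way out.
    with dependency_cache(['echo hello'], [GradingArtifacts()]) as found:
        assert not found

    # Second pass with an identical cache key: the fingerprint is found.
    with dependency_cache(['echo hello'], [GradingArtifacts()]) as found:
        assert found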