rbx_cp-0.7.0-py3-none-any.whl → rbx_cp-0.8.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. rbx/box/cli.py +79 -31
  2. rbx/box/code.py +131 -82
  3. rbx/box/global_package.py +74 -0
  4. rbx/box/package.py +6 -19
  5. rbx/box/remote.py +19 -0
  6. rbx/box/sanitizers/warning_stack.py +3 -3
  7. rbx/box/solutions.py +13 -7
  8. rbx/box/stats.py +10 -0
  9. rbx/box/stresses.py +45 -64
  10. rbx/box/stressing/finder_parser.py +11 -16
  11. rbx/box/tasks.py +33 -22
  12. rbx/box/tooling/boca/scraper.py +1 -1
  13. rbx/grading/caching.py +98 -47
  14. rbx/grading/debug_context.py +31 -0
  15. rbx/grading/grading_context.py +96 -0
  16. rbx/grading/judge/cacher.py +93 -21
  17. rbx/grading/judge/sandbox.py +6 -3
  18. rbx/grading/judge/sandboxes/timeit.py +1 -1
  19. rbx/grading/judge/storage.py +169 -35
  20. rbx/grading/profiling.py +126 -0
  21. rbx/grading/steps.py +44 -16
  22. rbx/grading/steps_with_caching.py +52 -26
  23. rbx/resources/presets/default/contest/.gitignore +2 -0
  24. rbx/resources/presets/default/contest/contest.rbx.yml +14 -1
  25. rbx/resources/presets/default/contest/statement/contest.rbx.tex +25 -86
  26. rbx/resources/presets/default/contest/statement/icpc.sty +322 -0
  27. rbx/resources/presets/default/contest/statement/instructions.tex +40 -0
  28. rbx/resources/presets/default/contest/statement/logo.png +0 -0
  29. rbx/resources/presets/default/contest/statement/template.rbx.tex +45 -36
  30. rbx/resources/presets/default/preset.rbx.yml +2 -2
  31. rbx/resources/presets/default/problem/problem.rbx.yml +12 -8
  32. rbx/resources/presets/default/problem/statement/icpc.sty +322 -0
  33. rbx/resources/presets/default/problem/statement/template.rbx.tex +47 -79
  34. {rbx_cp-0.7.0.dist-info → rbx_cp-0.8.0.dist-info}/METADATA +3 -1
  35. {rbx_cp-0.7.0.dist-info → rbx_cp-0.8.0.dist-info}/RECORD +43 -36
  36. rbx/resources/presets/default/contest/statement/olymp.sty +0 -250
  37. rbx/resources/presets/default/problem/statement/olymp.sty +0 -250
  38. /rbx/resources/presets/default/problem/{gen.cpp → gens/gen.cpp} +0 -0
  39. /rbx/resources/presets/default/problem/{tests → manual_tests}/samples/000.in +0 -0
  40. /rbx/resources/presets/default/problem/{tests → manual_tests}/samples/001.in +0 -0
  41. /rbx/resources/presets/default/problem/{random.py → testplan/random.py} +0 -0
  42. /rbx/resources/presets/default/problem/{random.txt → testplan/random.txt} +0 -0
  43. {rbx_cp-0.7.0.dist-info → rbx_cp-0.8.0.dist-info}/LICENSE +0 -0
  44. {rbx_cp-0.7.0.dist-info → rbx_cp-0.8.0.dist-info}/WHEEL +0 -0
  45. {rbx_cp-0.7.0.dist-info → rbx_cp-0.8.0.dist-info}/entry_points.txt +0 -0
rbx/box/cli.py CHANGED
@@ -1,3 +1,4 @@
1
+ import atexit
1
2
  import pathlib
2
3
  import shlex
3
4
  import shutil
@@ -9,6 +10,7 @@ import rich
9
10
  import rich.prompt
10
11
  import syncer
11
12
  import typer
13
+ from ordered_set import OrderedSet
12
14
 
13
15
  from rbx import annotations, config, console, utils
14
16
  from rbx.box import (
@@ -18,6 +20,7 @@ from rbx.box import (
18
20
  download,
19
21
  environment,
20
22
  generators,
23
+ global_package,
21
24
  package,
22
25
  presets,
23
26
  setter_config,
@@ -43,6 +46,7 @@ from rbx.box.statements import build_statements
43
46
  from rbx.box.testcase_utils import TestcaseEntry
44
47
  from rbx.box.testcases import main as testcases
45
48
  from rbx.box.tooling import main as tooling
49
+ from rbx.grading import grading_context
46
50
 
47
51
  app = typer.Typer(no_args_is_help=True, cls=annotations.AliasGroup)
48
52
  app.add_typer(
@@ -105,6 +109,15 @@ app.add_typer(
105
109
 
106
110
  @app.callback()
107
111
  def main(
112
+ cache: Annotated[
113
+ int,
114
+ typer.Option(
115
+ '-c',
116
+ '--cache',
117
+ help='Which degree of caching to use.',
118
+ default_factory=lambda: grading_context.CacheLevel.CACHE_ALL.value,
119
+ ),
120
+ ],
108
121
  sanitized: bool = typer.Option(
109
122
  False,
110
123
  '--sanitized',
@@ -118,12 +131,23 @@ def main(
118
131
  flag_value=False,
119
132
  help='Whether to save extra logs and outputs from interactive solutions.',
120
133
  ),
134
+ profile: bool = typer.Option(
135
+ False,
136
+ '--profile',
137
+ '-p',
138
+ help='Whether to profile the execution.',
139
+ ),
121
140
  ):
122
141
  if cd.is_problem_package() and not package.is_cache_valid():
123
142
  console.console.print(
124
143
  '[warning]Cache is incompatible with the current version of [item]rbx[/item], so it will be cleared.[/warning]'
125
144
  )
126
145
  clear()
146
+ if not global_package.is_global_cache_valid():
147
+ console.console.print(
148
+ '[warning]Global cache is incompatible with the current version of [item]rbx[/item], so it will be cleared.[/warning]'
149
+ )
150
+ clear(global_cache=True)
127
151
 
128
152
  state.STATE.run_through_cli = True
129
153
  state.STATE.sanitized = sanitized
@@ -134,6 +158,13 @@ def main(
134
158
  )
135
159
  state.STATE.debug_logs = capture
136
160
 
161
+ grading_context.cache_level_var.set(grading_context.CacheLevel(cache))
162
+
163
+ if profile:
164
+ from rbx.grading import profiling
165
+
166
+ atexit.register(profiling.print_summary)
167
+
137
168
 
138
169
  @app.command('ui', hidden=True)
139
170
  @package.within_problem
@@ -252,17 +283,17 @@ async def run(
252
283
  )
253
284
  check = False
254
285
 
255
- tracked_solutions = None
286
+ tracked_solutions: Optional[OrderedSet[str]] = None
256
287
  if outcome is not None:
257
- tracked_solutions = {
288
+ tracked_solutions = OrderedSet(
258
289
  str(solution.path)
259
290
  for solution in get_matching_solutions(ExpectedOutcome(outcome))
260
- }
291
+ )
261
292
  if solutions:
262
- tracked_solutions = set(solutions)
293
+ tracked_solutions = OrderedSet(solutions)
263
294
 
264
295
  if choice:
265
- tracked_solutions = set(
296
+ tracked_solutions = OrderedSet(
266
297
  await pick_solutions(
267
298
  tracked_solutions,
268
299
  extra_solutions=solutions,
@@ -307,10 +338,10 @@ async def run(
307
338
  console.console.print(
308
339
  '[warning]Sanitizers are running, and no solutions were specified to run. Will only run [item]ACCEPTED[/item] solutions.'
309
340
  )
310
- tracked_solutions = {
341
+ tracked_solutions = OrderedSet(
311
342
  str(solution.path)
312
343
  for solution in get_exact_matching_solutions(ExpectedOutcome.ACCEPTED)
313
- }
344
+ )
314
345
 
315
346
  with utils.StatusProgress('Running solutions...') as s:
316
347
  solution_result = run_solutions(
@@ -343,10 +374,10 @@ async def _time_impl(check: bool, detailed: bool, runs: int = 0) -> Optional[int
343
374
  verification = VerificationLevel.ALL_SOLUTIONS.value
344
375
 
345
376
  with utils.StatusProgress('Running ACCEPTED solutions...') as s:
346
- tracked_solutions = {
377
+ tracked_solutions = OrderedSet(
347
378
  str(solution.path)
348
379
  for solution in get_exact_matching_solutions(ExpectedOutcome.ACCEPTED)
349
- }
380
+ )
350
381
  solution_result = run_solutions(
351
382
  progress=s,
352
383
  tracked_solutions=tracked_solutions,
@@ -495,17 +526,17 @@ async def irun(
495
526
  )
496
527
  return
497
528
 
498
- tracked_solutions = None
529
+ tracked_solutions: Optional[OrderedSet[str]] = None
499
530
  if outcome is not None:
500
- tracked_solutions = {
531
+ tracked_solutions = OrderedSet(
501
532
  str(solution.path)
502
533
  for solution in get_matching_solutions(ExpectedOutcome(outcome))
503
- }
534
+ )
504
535
  if solutions:
505
- tracked_solutions = set(solutions)
536
+ tracked_solutions = OrderedSet(solutions)
506
537
 
507
538
  if choice:
508
- tracked_solutions = set(
539
+ tracked_solutions = OrderedSet(
509
540
  await pick_solutions(
510
541
  tracked_solutions,
511
542
  extra_solutions=solutions,
@@ -519,10 +550,10 @@ async def irun(
519
550
  console.console.print(
520
551
  '[warning]Sanitizers are running, and no solutions were specified to run. Will only run [item]ACCEPTED[/item] solutions.'
521
552
  )
522
- tracked_solutions = {
553
+ tracked_solutions = OrderedSet(
523
554
  str(solution.path)
524
555
  for solution in get_exact_matching_solutions(ExpectedOutcome.ACCEPTED)
525
- }
556
+ )
526
557
 
527
558
  with utils.StatusProgress('Running solutions...') as s:
528
559
  await run_and_print_interactive_solutions(
@@ -631,16 +662,17 @@ async def stress(
631
662
  from rbx.box import stresses
632
663
 
633
664
  with utils.StatusProgress('Running stress...') as s:
634
- report = await stresses.run_stress(
635
- timeout,
636
- name=name,
637
- generator_call=generator_args,
638
- finder=finder,
639
- findingsLimit=findings,
640
- progress=s,
641
- verbose=verbose,
642
- sanitized=sanitized,
643
- )
665
+ with grading_context.cache_level(grading_context.CacheLevel.CACHE_COMPILATION):
666
+ report = await stresses.run_stress(
667
+ timeout,
668
+ name=name,
669
+ generator_call=generator_args,
670
+ finder=finder,
671
+ findingsLimit=findings,
672
+ progress=s,
673
+ verbose=verbose,
674
+ sanitized=sanitized,
675
+ )
644
676
 
645
677
  stresses.print_stress_report(report)
646
678
 
@@ -922,13 +954,29 @@ def fix(print_diff: bool = typer.Option(False, '--print-diff', '-p')):
922
954
  linting.fix_package(print_diff=print_diff)
923
955
 
924
956
 
957
+ @cd.within_closest_package
958
+ def _clear_package_cache():
959
+ console.console.print('Cleaning cache and build directories...')
960
+ shutil.rmtree('.box', ignore_errors=True)
961
+ shutil.rmtree('build', ignore_errors=True)
962
+
963
+
925
964
  @app.command(
926
965
  'clear, clean',
927
966
  rich_help_panel='Management',
928
967
  help='Clears cache and build directories.',
929
968
  )
930
- @cd.within_closest_package
931
- def clear():
932
- console.console.print('Cleaning cache and build directories...')
933
- shutil.rmtree('.box', ignore_errors=True)
934
- shutil.rmtree('build', ignore_errors=True)
969
+ def clear(global_cache: bool = typer.Option(False, '--global', '-g')):
970
+ cleared = False
971
+ if global_cache:
972
+ console.console.print('Cleaning global cache...')
973
+ global_package.clear_global_cache()
974
+ cleared = True
975
+
976
+ closest_package = cd.find_package()
977
+ if closest_package is not None:
978
+ _clear_package_cache()
979
+ cleared = True
980
+
981
+ if not cleared:
982
+ console.console.print('[error]No cache or build directories to clean.[/error]')
rbx/box/code.py CHANGED
@@ -10,9 +10,10 @@ from typing import List, Optional
10
10
  import rich
11
11
  import rich.text
12
12
  import typer
13
+ from pydantic import BaseModel
13
14
 
14
15
  from rbx import console
15
- from rbx.box import download, package, setter_config, state
16
+ from rbx.box import download, global_package, package, setter_config, state
16
17
  from rbx.box.environment import (
17
18
  CompilationConfig,
18
19
  ExecutionConfig,
@@ -27,10 +28,11 @@ from rbx.box.environment import (
27
28
  merge_execution_configs,
28
29
  )
29
30
  from rbx.box.formatting import get_formatted_memory
31
+ from rbx.box.remote import is_path_remote
30
32
  from rbx.box.sanitizers import warning_stack
31
33
  from rbx.box.schema import CodeItem
32
- from rbx.grading import steps, steps_with_caching
33
- from rbx.grading.judge.sandbox import SandboxBase, SandboxParams
34
+ from rbx.grading import grading_context, profiling, steps, steps_with_caching
35
+ from rbx.grading.judge.sandbox import SandboxParams
34
36
  from rbx.grading.steps import (
35
37
  DigestHolder,
36
38
  DigestOrDest,
@@ -59,6 +61,10 @@ class SanitizationLevel(Enum):
59
61
  return self.value >= SanitizationLevel.FORCE.value
60
62
 
61
63
 
64
+ class CompilationMetadata(BaseModel):
65
+ is_sanitized: bool
66
+
67
+
62
68
  def substitute_commands(commands: List[str], sanitized: bool = False) -> List[str]:
63
69
  cfg = setter_config.get_setter_config()
64
70
  return [cfg.substitute_command(command, sanitized) for command in commands]
@@ -78,8 +84,15 @@ def find_language_name(code: CodeItem) -> str:
78
84
  def is_executable_sanitized(executable: DigestOrSource) -> bool:
79
85
  if executable.digest is None:
80
86
  return False
81
- storage = package.get_cache_storage()
82
- return storage.exists(f'{executable.digest.value}.san')
87
+ if executable.digest.value is None:
88
+ return False
89
+ cacher = package.get_file_cacher()
90
+ desc = cacher.get_metadata(
91
+ executable.digest.value, 'compilation', CompilationMetadata
92
+ )
93
+ if desc is None:
94
+ return False
95
+ return desc.is_sanitized
83
96
 
84
97
 
85
98
  def add_sanitizer_flags_to_command(command: str) -> str:
@@ -402,7 +415,6 @@ def _should_precompile(commands: List[str]) -> bool:
402
415
  def _precompile_header(
403
416
  compilation_options: CompilationConfig,
404
417
  sanitized: SanitizationLevel,
405
- sandbox: SandboxBase,
406
418
  sandbox_params: SandboxParams,
407
419
  artifacts: GradingArtifacts,
408
420
  input_artifact: GradingFileInput,
@@ -417,7 +429,8 @@ def _precompile_header(
417
429
  """
418
430
  assert compilation_options.commands is not None
419
431
 
420
- dependency_cache = package.get_dependency_cache()
432
+ sandbox = global_package.get_global_sandbox()
433
+ dependency_cache = global_package.get_global_dependency_cache()
421
434
 
422
435
  # TODO: deduplicate code with compile_item.
423
436
  commands = get_mapped_commands(
@@ -455,41 +468,60 @@ def _precompile_header(
455
468
  GradingFileOutput(
456
469
  src=PosixPath('precompilable.h.gch'),
457
470
  digest=precompiled_digest,
458
- executable=True,
459
471
  )
460
472
  )
461
473
 
462
- if not steps_with_caching.compile(
463
- commands,
464
- params=sandbox_params,
465
- artifacts=precompilation_artifacts,
466
- sandbox=sandbox,
467
- dependency_cache=dependency_cache,
468
- ):
469
- console.console.print(
470
- f'[error]Failed to precompile header file: [item]{input_artifact.src}[/item][/error]'
471
- )
472
- raise typer.Exit(1)
474
+ with profiling.PushContext('code.precompile_header'):
475
+ if not steps_with_caching.compile(
476
+ commands,
477
+ params=sandbox_params,
478
+ artifacts=precompilation_artifacts,
479
+ sandbox=sandbox,
480
+ dependency_cache=dependency_cache,
481
+ ):
482
+ console.console.print(
483
+ f'[error]Failed to precompile header file: [item]{input_artifact.src}[/item][/error]'
484
+ )
485
+ raise typer.Exit(1)
473
486
 
474
- if verbose:
475
- console.console.print(
476
- f'[status]Precompiled header file: [item]{input_artifact.src}[/item]'
477
- )
487
+ if verbose:
488
+ console.console.print(
489
+ f'[status]Precompiled header file: [item]{input_artifact.src}[/item]'
490
+ )
478
491
 
479
- if (
480
- precompilation_artifacts.logs is not None
481
- and precompilation_artifacts.logs.preprocess is not None
482
- ):
483
- for log in precompilation_artifacts.logs.preprocess:
484
- console.console.print(f'[status]Command:[/status] {log.get_command()}')
485
- console.console.print(f'[status]Summary:[/status] {log.get_summary()}')
492
+ if (
493
+ precompilation_artifacts.logs is not None
494
+ and precompilation_artifacts.logs.preprocess is not None
495
+ ):
496
+ for log in precompilation_artifacts.logs.preprocess:
497
+ console.console.print(
498
+ f'[status]Command:[/status] {log.get_command()}'
499
+ )
500
+ console.console.print(
501
+ f'[status]Summary:[/status] {log.get_summary()}'
502
+ )
486
503
 
487
504
  assert precompiled_digest.value is not None
488
505
 
506
+ digest_path = dependency_cache.cacher.path_for_symlink(precompiled_digest.value)
507
+ if digest_path is not None and digest_path.is_file():
508
+ # If storage backend supports symlinks, use it as the grading input.
509
+ input = DigestOrSource.create(digest_path)
510
+ else:
511
+ # Otherwise, copy the file to the local cache, transiently.
512
+ local_cacher = package.get_file_cacher()
513
+ with dependency_cache.cacher.get_file(precompiled_digest.value) as f:
514
+ with grading_context.cache_level(
515
+ grading_context.CacheLevel.CACHE_TRANSIENTLY
516
+ ):
517
+ input = DigestOrSource.create(local_cacher.put_file_from_fobj(f))
518
+
489
519
  return GradingFileInput(
490
- digest=precompiled_digest,
520
+ **input.expand(),
491
521
  dest=input_artifact.dest.with_suffix('.h.gch'),
492
- executable=True,
522
+ # Do not track fingerprint of the precompiled header file,
523
+ # trust the compilation step above.
524
+ hash=False,
493
525
  )
494
526
 
495
527
 
@@ -576,37 +608,43 @@ def compile_item(
576
608
 
577
609
  # Precompile C++ interesting header files.
578
610
  if precompile and _should_precompile(commands):
579
- precompilation_inputs = []
580
- for input in artifacts.inputs:
581
- if (
582
- input.src is not None
583
- and input.src.suffix == '.h'
584
- and input.dest.name in ['stdc++.h', 'jngen.h', 'testlib.h']
585
- ):
586
- precompilation_inputs.append(
587
- _precompile_header(
588
- compilation_options,
589
- sanitized,
590
- sandbox,
591
- sandbox_params,
592
- artifacts,
593
- input,
594
- force_warnings,
595
- verbose=False,
611
+ with profiling.Profiler('code.precompile'):
612
+ precompilation_inputs = []
613
+ for input in artifacts.inputs:
614
+ if (
615
+ input.src is not None
616
+ and input.src.suffix == '.h'
617
+ and input.dest.name in ['stdc++.h', 'jngen.h', 'testlib.h']
618
+ ):
619
+ precompilation_inputs.append(
620
+ _precompile_header(
621
+ compilation_options,
622
+ sanitized,
623
+ sandbox_params,
624
+ artifacts,
625
+ input,
626
+ force_warnings,
627
+ verbose=False,
628
+ )
596
629
  )
597
- )
598
- if precompilation_inputs:
599
- artifacts.inputs.extend(precompilation_inputs)
600
-
601
- # Compile the code.
602
- if not steps_with_caching.compile(
603
- commands,
604
- params=sandbox_params,
605
- artifacts=artifacts,
606
- sandbox=sandbox,
607
- dependency_cache=dependency_cache,
608
- ):
609
- raise typer.Exit(1)
630
+ if precompilation_inputs:
631
+ artifacts.inputs.extend(precompilation_inputs)
632
+
633
+ with profiling.Profiler('code.compile'):
634
+ # Compile the code.
635
+ # Do not cache remote solutions.
636
+ with grading_context.cache_level(
637
+ grading_context.CacheLevel.NO_CACHE,
638
+ when=lambda: is_path_remote(code.path),
639
+ ):
640
+ if not steps_with_caching.compile(
641
+ commands,
642
+ params=sandbox_params,
643
+ artifacts=artifacts,
644
+ sandbox=sandbox,
645
+ dependency_cache=dependency_cache,
646
+ ):
647
+ raise typer.Exit(1)
610
648
 
611
649
  assert compiled_digest.value is not None
612
650
 
@@ -629,13 +667,13 @@ def compile_item(
629
667
  warning_stack.get_warning_stack().add_warning(code)
630
668
 
631
669
  # Create sentinel to indicate this executable is sanitized.
632
- storage = package.get_cache_storage()
670
+ cacher = package.get_file_cacher()
633
671
  if sanitized.should_sanitize():
634
- pf = storage.create_file(f'{compiled_digest.value}.san')
635
- if pf is not None:
636
- storage.commit_file(pf)
637
- elif storage.exists(f'{compiled_digest.value}.san'):
638
- storage.delete(f'{compiled_digest.value}.san')
672
+ cacher.set_metadata(
673
+ compiled_digest.value, 'compilation', CompilationMetadata(is_sanitized=True)
674
+ )
675
+ else:
676
+ cacher.set_metadata(compiled_digest.value, 'compilation', None)
639
677
 
640
678
  return compiled_digest.value
641
679
 
@@ -669,14 +707,20 @@ async def run_item(
669
707
  retry_index,
670
708
  )
671
709
 
672
- run_log = await steps_with_caching.run(
673
- prepared.command,
674
- params=prepared.sandbox_params,
675
- sandbox=package.get_singleton_sandbox(),
676
- artifacts=prepared.artifacts,
677
- dependency_cache=dependency_cache,
678
- metadata=prepared.metadata,
679
- )
710
+ with profiling.PushContext('code.run_item'):
711
+ # Do not cache remote solutions.
712
+ with grading_context.cache_level(
713
+ grading_context.CacheLevel.NO_CACHE,
714
+ when=lambda: is_path_remote(code.path),
715
+ ):
716
+ run_log = await steps_with_caching.run(
717
+ prepared.command,
718
+ params=prepared.sandbox_params,
719
+ sandbox=package.get_singleton_sandbox(),
720
+ artifacts=prepared.artifacts,
721
+ dependency_cache=dependency_cache,
722
+ metadata=prepared.metadata,
723
+ )
680
724
 
681
725
  # Find sanitizer logs.
682
726
  if run_log is not None and run_log.warnings:
@@ -686,7 +730,7 @@ async def run_item(
686
730
  )
687
731
  if stderr_output is not None:
688
732
  warning_stack.get_warning_stack().add_sanitizer_warning(
689
- package.get_cache_storage(), code, stderr_output
733
+ package.get_file_cacher(), code, stderr_output
690
734
  )
691
735
  return run_log
692
736
 
@@ -773,8 +817,13 @@ async def run_communication(
773
817
  metadata=solution_prepared.metadata,
774
818
  )
775
819
 
776
- return await steps_with_caching.run_coordinated(
777
- interactor_run_params,
778
- solution_run_params,
779
- dependency_cache=package.get_dependency_cache(),
780
- )
820
+ # Do not cache remote solutions.
821
+ with grading_context.cache_level(
822
+ grading_context.CacheLevel.NO_CACHE,
823
+ when=lambda: is_path_remote(solution.code.path),
824
+ ):
825
+ return await steps_with_caching.run_coordinated(
826
+ interactor_run_params,
827
+ solution_run_params,
828
+ dependency_cache=package.get_dependency_cache(),
829
+ )
@@ -0,0 +1,74 @@
1
+ import functools
2
+ import pathlib
3
+ import shutil
4
+
5
+ from rbx.config import get_app_path
6
+ from rbx.grading.caching import DependencyCache
7
+ from rbx.grading.judge.cacher import FileCacher
8
+ from rbx.grading.judge.sandbox import SandboxBase
9
+ from rbx.grading.judge.sandboxes.stupid_sandbox import StupidSandbox
10
+ from rbx.grading.judge.storage import FilesystemStorage, Storage
11
+
12
+ CACHE_STEP_VERSION = 3
13
+
14
+
15
+ def get_cache_fingerprint() -> str:
16
+ return f'{CACHE_STEP_VERSION}'
17
+
18
+
19
+ @functools.cache
20
+ def is_cache_valid(cache_dir: pathlib.Path) -> bool:
21
+ if not cache_dir.is_dir():
22
+ return True
23
+ fingerprint_file = cache_dir / 'fingerprint'
24
+ if not fingerprint_file.is_file():
25
+ return False
26
+ fingerprint = fingerprint_file.read_text()
27
+ if fingerprint.strip() != get_cache_fingerprint():
28
+ return False
29
+ return True
30
+
31
+
32
+ @functools.cache
33
+ def get_global_cache_dir() -> pathlib.Path:
34
+ cache_dir = get_app_path() / '.box'
35
+ cache_dir.mkdir(parents=True, exist_ok=True)
36
+ fingerprint_file = cache_dir / 'fingerprint'
37
+ if not fingerprint_file.is_file():
38
+ fingerprint_file.write_text(get_cache_fingerprint())
39
+ return cache_dir
40
+
41
+
42
+ def is_global_cache_valid() -> bool:
43
+ return is_cache_valid(get_global_cache_dir())
44
+
45
+
46
+ @functools.cache
47
+ def get_global_storage_dir() -> pathlib.Path:
48
+ storage_dir = get_global_cache_dir() / '.storage'
49
+ storage_dir.mkdir(parents=True, exist_ok=True)
50
+ return storage_dir
51
+
52
+
53
+ @functools.cache
54
+ def get_global_cache_storage() -> Storage:
55
+ return FilesystemStorage(get_global_storage_dir())
56
+
57
+
58
+ @functools.cache
59
+ def get_global_file_cacher() -> FileCacher:
60
+ return FileCacher(get_global_cache_storage())
61
+
62
+
63
+ @functools.cache
64
+ def get_global_dependency_cache() -> DependencyCache:
65
+ return DependencyCache(get_global_cache_dir(), get_global_file_cacher())
66
+
67
+
68
+ @functools.cache
69
+ def get_global_sandbox() -> SandboxBase:
70
+ return StupidSandbox(get_global_file_cacher())
71
+
72
+
73
+ def clear_global_cache():
74
+ shutil.rmtree(get_global_cache_dir(), ignore_errors=True)
rbx/box/package.py CHANGED
@@ -11,8 +11,9 @@ import typer
11
11
  from pydantic import ValidationError
12
12
 
13
13
  from rbx import console, utils
14
- from rbx.box import cd
14
+ from rbx.box import cd, global_package
15
15
  from rbx.box.environment import get_sandbox_type
16
+ from rbx.box.global_package import get_cache_fingerprint
16
17
  from rbx.box.schema import (
17
18
  CodeItem,
18
19
  ExpectedOutcome,
@@ -34,7 +35,6 @@ YAML_NAME = 'problem.rbx.yml'
34
35
  _DEFAULT_CHECKER = 'wcmp.cpp'
35
36
  _NOOP_CHECKER = 'noop.cpp'
36
37
  TEMP_DIR = None
37
- CACHE_STEP_VERSION = 1
38
38
 
39
39
 
40
40
  @functools.cache
@@ -107,17 +107,13 @@ def get_ruyaml(root: pathlib.Path = pathlib.Path()) -> Tuple[ruyaml.YAML, ruyaml
107
107
  return res, res.load(problem_yaml_path.read_text())
108
108
 
109
109
 
110
- def _get_fingerprint() -> str:
111
- return f'{CACHE_STEP_VERSION}'
112
-
113
-
114
110
  @functools.cache
115
111
  def get_problem_cache_dir(root: pathlib.Path = pathlib.Path()) -> pathlib.Path:
116
112
  cache_dir = find_problem(root) / '.box'
117
113
  cache_dir.mkdir(parents=True, exist_ok=True)
118
114
  fingerprint_file = cache_dir / 'fingerprint'
119
115
  if not fingerprint_file.is_file():
120
- fingerprint_file.write_text(_get_fingerprint())
116
+ fingerprint_file.write_text(get_cache_fingerprint())
121
117
  return cache_dir
122
118
 
123
119
 
@@ -170,12 +166,12 @@ def get_problem_preprocessed_path(
170
166
 
171
167
  @functools.cache
172
168
  def get_cache_storage(root: pathlib.Path = pathlib.Path()) -> Storage:
173
- return FilesystemStorage(get_problem_storage_dir(root))
169
+ return FilesystemStorage(get_problem_storage_dir(root), compress=False)
174
170
 
175
171
 
176
172
  @functools.cache
177
173
  def get_dependency_cache(root: pathlib.Path = pathlib.Path()) -> DependencyCache:
178
- return DependencyCache(get_problem_cache_dir(root), get_cache_storage(root))
174
+ return DependencyCache(get_problem_cache_dir(root), get_file_cacher(root))
179
175
 
180
176
 
181
177
  @functools.cache
@@ -427,16 +423,7 @@ def get_merged_capture_path(root: pathlib.Path = pathlib.Path()) -> pathlib.Path
427
423
  @functools.cache
428
424
  def is_cache_valid(root: pathlib.Path = pathlib.Path()):
429
425
  cache_dir = find_problem(root) / '.box'
430
- if not cache_dir.is_dir():
431
- return True
432
-
433
- fingerprint_file = cache_dir / 'fingerprint'
434
- if not fingerprint_file.is_file():
435
- return False
436
- fingerprint = fingerprint_file.read_text()
437
- if fingerprint.strip() != _get_fingerprint():
438
- return False
439
- return True
426
+ return global_package.is_cache_valid(cache_dir)
440
427
 
441
428
 
442
429
  def clear_package_cache():