rbx.cp 0.6.1__py3-none-any.whl → 0.8.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. rbx/box/cd.py +32 -4
  2. rbx/box/cli.py +82 -34
  3. rbx/box/code.py +131 -82
  4. rbx/box/contest/main.py +25 -0
  5. rbx/box/creation.py +3 -0
  6. rbx/box/global_package.py +74 -0
  7. rbx/box/linting.py +76 -7
  8. rbx/box/package.py +6 -19
  9. rbx/box/presets/__init__.py +4 -4
  10. rbx/box/remote.py +19 -0
  11. rbx/box/sanitizers/warning_stack.py +3 -3
  12. rbx/box/solutions.py +13 -7
  13. rbx/box/stats.py +10 -0
  14. rbx/box/stresses.py +45 -64
  15. rbx/box/stressing/finder_parser.py +11 -16
  16. rbx/box/tasks.py +33 -22
  17. rbx/box/tooling/boca/scraper.py +1 -1
  18. rbx/grading/caching.py +98 -47
  19. rbx/grading/debug_context.py +31 -0
  20. rbx/grading/grading_context.py +96 -0
  21. rbx/grading/judge/cacher.py +93 -21
  22. rbx/grading/judge/sandbox.py +6 -3
  23. rbx/grading/judge/sandboxes/timeit.py +1 -1
  24. rbx/grading/judge/storage.py +169 -35
  25. rbx/grading/profiling.py +126 -0
  26. rbx/grading/steps.py +44 -16
  27. rbx/grading/steps_with_caching.py +52 -26
  28. rbx/resources/presets/default/contest/.gitignore +2 -0
  29. rbx/resources/presets/default/contest/contest.rbx.yml +18 -4
  30. rbx/resources/presets/default/contest/statement/contest.rbx.tex +25 -86
  31. rbx/resources/presets/default/contest/statement/icpc.sty +322 -0
  32. rbx/resources/presets/default/contest/statement/instructions.tex +40 -0
  33. rbx/resources/presets/default/contest/statement/logo.png +0 -0
  34. rbx/resources/presets/default/contest/statement/template.rbx.tex +45 -36
  35. rbx/resources/presets/default/preset.rbx.yml +8 -6
  36. rbx/resources/presets/default/problem/problem.rbx.yml +20 -17
  37. rbx/resources/presets/default/problem/statement/icpc.sty +322 -0
  38. rbx/resources/presets/default/problem/statement/template.rbx.tex +47 -79
  39. {rbx_cp-0.6.1.dist-info → rbx_cp-0.8.0.dist-info}/METADATA +4 -1
  40. {rbx_cp-0.6.1.dist-info → rbx_cp-0.8.0.dist-info}/RECORD +48 -41
  41. rbx/resources/presets/default/contest/statement/olymp.sty +0 -250
  42. rbx/resources/presets/default/problem/statement/olymp.sty +0 -250
  43. /rbx/resources/presets/default/problem/{gen.cpp → gens/gen.cpp} +0 -0
  44. /rbx/resources/presets/default/problem/{tests → manual_tests}/samples/000.in +0 -0
  45. /rbx/resources/presets/default/problem/{tests → manual_tests}/samples/001.in +0 -0
  46. /rbx/resources/presets/default/problem/{random.py → testplan/random.py} +0 -0
  47. /rbx/resources/presets/default/problem/{random.txt → testplan/random.txt} +0 -0
  48. {rbx_cp-0.6.1.dist-info → rbx_cp-0.8.0.dist-info}/LICENSE +0 -0
  49. {rbx_cp-0.6.1.dist-info → rbx_cp-0.8.0.dist-info}/WHEEL +0 -0
  50. {rbx_cp-0.6.1.dist-info → rbx_cp-0.8.0.dist-info}/entry_points.txt +0 -0
@@ -7,8 +7,11 @@ import pathlib
7
7
  import shutil
8
8
  import tempfile
9
9
  import typing
10
- from typing import IO, List, Optional
10
+ from typing import IO, Dict, List, Optional, Type
11
11
 
12
+ from pydantic import BaseModel
13
+
14
+ from rbx.grading import grading_context
12
15
  from rbx.grading.judge import digester, storage
13
16
 
14
17
  logger = logging.getLogger(__name__)
@@ -60,6 +63,7 @@ class FileCacher:
60
63
  self.backend = backend
61
64
  self.shared = shared
62
65
  self.folder = folder
66
+ self.existing = set()
63
67
 
64
68
  # First we create the config directories.
65
69
  if folder:
@@ -146,6 +150,11 @@ class FileCacher:
146
150
 
147
151
  logger.debug('File %s not in cache, downloading ' 'from database.', digest)
148
152
 
153
+ if (symlink := self.backend.path_for_symlink(digest)) is not None:
154
+ cache_file_path.unlink(missing_ok=True)
155
+ cache_file_path.symlink_to(symlink)
156
+ return cache_file_path.open('rb') if not cache_only else None
157
+
149
158
  ftmp_handle, temp_file_path = tempfile.mkstemp(dir=self.temp_dir, text=False)
150
159
  temp_file_path = pathlib.Path(temp_file_path)
151
160
  with open(ftmp_handle, 'wb') as ftmp, self.backend.get_file(digest) as fobj:
@@ -168,6 +177,22 @@ class FileCacher:
168
177
  if not cache_only:
169
178
  return fd
170
179
 
180
+ def exists(self, digest: str, cache_only: bool = False) -> bool:
181
+ """Check if a file exists in the cacher.
182
+
183
+ cache_only (bool): don't check the backend.
184
+
185
+ """
186
+ cache_file_path = self.file_dir / digest
187
+ if cache_file_path.exists() or digest in self.existing:
188
+ return True
189
+ if cache_only:
190
+ return False
191
+ exists = self.backend.exists(digest)
192
+ if exists:
193
+ self.existing.add(digest)
194
+ return exists
195
+
171
196
  def cache_file(self, digest: str):
172
197
  """Load a file into the cache.
173
198
 
@@ -219,9 +244,18 @@ class FileCacher:
219
244
  if digest == storage.TOMBSTONE:
220
245
  raise TombstoneError()
221
246
 
247
+ if grading_context.is_transient():
248
+ return None
249
+
222
250
  logger.debug('Getting symlink file path %s.', digest)
223
251
  return self.backend.path_for_symlink(digest)
224
252
 
253
+ def digest_from_symlink(self, link: pathlib.Path) -> Optional[str]:
254
+ if grading_context.is_transient():
255
+ return None
256
+
257
+ return self.backend.filename_from_symlink(link)
258
+
225
259
  def get_file_content(self, digest: str) -> bytes:
226
260
  """Retrieve a file from the storage.
227
261
 
@@ -280,7 +314,9 @@ class FileCacher:
280
314
  with dst_path.open('wb') as dst:
281
315
  storage.copyfileobj(src, dst, self.CHUNK_SIZE)
282
316
 
283
- def put_file_from_fobj(self, src: IO[bytes], desc: str = '') -> str:
317
+ def put_file_from_fobj(
318
+ self, src: IO[bytes], metadata: Optional[Dict[str, BaseModel]] = None
319
+ ) -> str:
284
320
  """Store a file in the storage.
285
321
 
286
322
  If it's already (for some reason...) in the cache send that
@@ -292,7 +328,7 @@ class FileCacher:
292
328
 
293
329
  src (fileobj): a readable binary file-like object from which
294
330
  to read the contents of the file.
295
- desc (unicode): the (optional) description to associate to the
331
+ metadata (Dict[str, BaseModel]): the (optional) metadata to associate to the
296
332
  file.
297
333
 
298
334
  return (unicode): the digest of the stored file.
@@ -334,36 +370,45 @@ class FileCacher:
334
370
  # We read from the temporary file before moving it to
335
371
  # cache_file_path because the latter might be deleted before
336
372
  # we get a chance to open it.
337
- with open(dst.name, 'rb') as src:
338
- pending_file = self.backend.create_file(digest)
339
- if pending_file is not None:
340
- storage.copyfileobj(src, pending_file.fd, self.CHUNK_SIZE)
341
- self.backend.commit_file(pending_file, desc)
373
+ #
374
+ # Only store file when not in transient mode.
375
+ if not grading_context.is_transient():
376
+ with open(dst.name, 'rb') as src:
377
+ pending_file = self.backend.create_file(digest)
378
+ if pending_file is not None:
379
+ storage.copyfileobj(src, pending_file.fd, self.CHUNK_SIZE)
380
+ self.backend.commit_file(pending_file, metadata)
342
381
 
343
382
  os.rename(dst.name, cache_file_path)
344
383
 
345
384
  return digest
346
385
 
347
- def put_file_content(self, content: bytes, desc: str = '') -> str:
386
+ def put_file_content(
387
+ self, content: bytes, metadata: Optional[Dict[str, BaseModel]] = None
388
+ ) -> str:
348
389
  """Store a file in the storage.
349
390
 
350
391
  See `put_file_from_fobj'. This method will read the content of
351
392
  the file from the given binary string.
352
393
 
353
394
  content (bytes): the content of the file to store.
354
- desc (unicode): the (optional) description to associate to the
395
+ metadata (Dict[str, BaseModel]): the (optional) metadata to associate to the
355
396
  file.
356
397
 
357
398
  return (unicode): the digest of the stored file.
358
399
 
359
400
  """
360
401
  with io.BytesIO(content) as src:
361
- return self.put_file_from_fobj(src, desc)
402
+ return self.put_file_from_fobj(src, metadata)
362
403
 
363
- def put_file_text(self, text: str, desc: str = '') -> str:
364
- return self.put_file_content(text.encode('utf-8'), desc)
404
+ def put_file_text(
405
+ self, text: str, metadata: Optional[Dict[str, BaseModel]] = None
406
+ ) -> str:
407
+ return self.put_file_content(text.encode('utf-8'), metadata)
365
408
 
366
- def put_file_from_path(self, src_path: pathlib.Path, desc: str = '') -> str:
409
+ def put_file_from_path(
410
+ self, src_path: pathlib.Path, metadata: Optional[Dict[str, BaseModel]] = None
411
+ ) -> str:
367
412
  """Store a file in the storage.
368
413
 
369
414
  See `put_file_from_fobj'. This method will read the content of
@@ -371,28 +416,53 @@ class FileCacher:
371
416
 
372
417
  src_path (Path): an accessible location on the file-system
373
418
  from which to read the contents of the file.
374
- desc (unicode): the (optional) description to associate to the
419
+ metadata (Dict[str, BaseModel]): the (optional) metadata to associate to the
375
420
  file.
376
421
 
377
422
  return (unicode): the digest of the stored file.
378
423
 
379
424
  """
380
425
  with src_path.open('rb') as src:
381
- return self.put_file_from_fobj(src, desc)
426
+ return self.put_file_from_fobj(src, metadata)
427
+
428
+ def set_metadata(self, digest: str, key: str, value: Optional[BaseModel]):
429
+ """Set the description of a file given its digest.
430
+
431
+ digest (unicode): the digest of the file to add the description.
432
+ key (str): the key of the metadata to add.
433
+ value (BaseModel): the value of the metadata to add.
434
+ """
435
+ if grading_context.is_transient():
436
+ return
437
+ self.backend.set_metadata(digest, key, value)
382
438
 
383
- def describe(self, digest: str) -> str:
439
+ def get_metadata(
440
+ self, digest: str, key: str, model_cls: Type[storage.BaseModelT]
441
+ ) -> Optional[storage.BaseModelT]:
384
442
  """Return the description of a file given its digest.
385
443
 
386
444
  digest (unicode): the digest of the file to describe.
387
-
388
- return (unicode): the description of the file.
445
+ key (str): the key of the metadata to get.
446
+ model_cls (Type[storage.BaseModelT]): the model class of the metadata.
447
+ return (BaseModel): the metadata of the file.
389
448
 
390
449
  raise (KeyError): if the file cannot be found.
391
450
 
392
451
  """
393
452
  if digest == storage.TOMBSTONE:
394
453
  raise TombstoneError()
395
- return self.backend.describe(digest)
454
+ return typing.cast(
455
+ Optional[storage.BaseModelT],
456
+ self.backend.get_metadata(digest, key, model_cls),
457
+ )
458
+
459
+ def list_metadata(self, filename: str) -> List[str]:
460
+ """List the metadata of a file given its filename.
461
+
462
+ filename (str): the filename of the file to list the metadata.
463
+ return (List[str]): the list of metadata keys.
464
+ """
465
+ return self.backend.list_metadata(filename)
396
466
 
397
467
  def get_size(self, digest: str) -> int:
398
468
  """Return the size of a file given its digest.
@@ -431,6 +501,7 @@ class FileCacher:
431
501
  return
432
502
  cache_file_path: pathlib.Path = self.file_dir / digest
433
503
  cache_file_path.unlink(missing_ok=True)
504
+ self.existing.discard(digest)
434
505
 
435
506
  def purge_cache(self):
436
507
  """Empty the local cache.
@@ -442,6 +513,7 @@ class FileCacher:
442
513
  self.file_dir.mkdir(parents=True, exist_ok=True)
443
514
  if self.folder is not None:
444
515
  self.folder.mkdir(parents=True, exist_ok=True)
516
+ self.existing.clear()
445
517
 
446
518
  def destroy_cache(self):
447
519
  """Completely remove and destroy the cache.
@@ -456,7 +528,7 @@ class FileCacher:
456
528
  raise Exception('You may not destroy a shared cache.')
457
529
  shutil.rmtree(str(self.file_dir))
458
530
 
459
- def list(self) -> List[storage.FileWithDescription]:
531
+ def list(self) -> List[storage.FileWithMetadata]:
460
532
  """List the files available in the storage.
461
533
 
462
534
  return ([(unicode, unicode)]): a list of pairs, each
@@ -647,12 +647,15 @@ class SandboxBase(abc.ABC):
647
647
  return self.get_file_to_bytes(path, maxlen).decode('utf-8')
648
648
 
649
649
  def get_file_to_storage(
650
- self, path: pathlib.Path, description: str = '', trunc_len: Optional[int] = None
650
+ self,
651
+ path: pathlib.Path,
652
+ metadata: Optional[Dict[str, pydantic.BaseModel]] = None,
653
+ trunc_len: Optional[int] = None,
651
654
  ) -> str:
652
655
  """Put a sandbox file in FS and return its digest.
653
656
 
654
657
  path (Path): relative path of the file inside the sandbox.
655
- description (str): the description for FS.
658
+ metadata (Dict[str, pydantic.BaseModel]): the metadata for FS.
656
659
  trunc_len (int|None): if None, does nothing; otherwise, before
657
660
  returning truncate it at the specified length.
658
661
 
@@ -660,7 +663,7 @@ class SandboxBase(abc.ABC):
660
663
 
661
664
  """
662
665
  with self.get_file(path, trunc_len=trunc_len) as file_:
663
- return self.file_cacher.put_file_from_fobj(file_, description)
666
+ return self.file_cacher.put_file_from_fobj(file_, metadata)
664
667
 
665
668
  def stat_file(self, path: pathlib.Path) -> os.stat_result:
666
669
  """Return the stats of a file in the sandbox.
@@ -100,9 +100,9 @@ def create_tee(files, mode, buffer_size=4096, prefix=''):
100
100
  tee.file.write(tee.prefix)
101
101
  tee.file.write(bytes)
102
102
  tee.file.flush()
103
- new = bytes == b'\n'
104
103
  # TODO maybe add in fsync() here if the fileno() method
105
104
  # exists on file
105
+ new = bytes == b'\n'
106
106
  except Exception:
107
107
  pass
108
108
  finally:
@@ -3,13 +3,21 @@ import io
3
3
  import logging
4
4
  import pathlib
5
5
  import tempfile
6
+ import typing
6
7
  from abc import ABC, abstractmethod
7
- from typing import IO, AnyStr, List, Optional
8
+ from typing import IO, AnyStr, Dict, List, Optional, Type, TypeVar
9
+
10
+ import lz4.frame
11
+ from pydantic import BaseModel
12
+
13
+ from rbx.grading import grading_context
8
14
 
9
15
  logger = logging.getLogger(__name__)
10
16
 
11
17
  TOMBSTONE = 'x'
12
18
 
19
+ BaseModelT = TypeVar('BaseModelT', bound=BaseModel)
20
+
13
21
 
14
22
  def copyfileobj(
15
23
  source_fobj: IO[AnyStr],
@@ -43,16 +51,24 @@ def copyfileobj(
43
51
  maxlen -= written
44
52
 
45
53
 
54
+ COMPRESSION_LEVEL = 5
55
+
56
+
57
+ class CompressionMetadata(BaseModel):
58
+ compression_level: int
59
+
60
+
46
61
  @dataclasses.dataclass
47
62
  class PendingFile:
48
63
  fd: IO[bytes]
49
64
  filename: str
65
+ metadata: Dict[str, Optional[BaseModel]] = dataclasses.field(default_factory=dict)
50
66
 
51
67
 
52
68
  @dataclasses.dataclass
53
- class FileWithDescription:
69
+ class FileWithMetadata:
54
70
  filename: str
55
- description: str
71
+ metadata: List[str]
56
72
 
57
73
 
58
74
  class Storage(ABC):
@@ -81,13 +97,15 @@ class Storage(ABC):
81
97
  pass
82
98
 
83
99
  @abstractmethod
84
- def commit_file(self, file: PendingFile, desc: str = '') -> bool:
100
+ def commit_file(
101
+ self, file: PendingFile, metadata: Optional[Dict[str, BaseModel]] = None
102
+ ) -> bool:
85
103
  """Commit a file created by create_file() to be stored.
86
104
  Given a file object returned by create_file(), this function populates
87
105
  the database to record that this file now legitimately exists and can
88
106
  be used.
89
- fobj (fileobj): the object returned by create_file()
90
107
  file (PendingFile): the file to commit.
108
+ metadata (Dict[str, BaseModel]): the metadata of the file.
91
109
  return (bool): True if the file was committed successfully, False if
92
110
  there was already a file with the same filename in the database. This
93
111
  shouldn't make any difference to the caller, except for testing
@@ -96,19 +114,40 @@ class Storage(ABC):
96
114
  pass
97
115
 
98
116
  @abstractmethod
99
- def exists(self, filename: str) -> bool:
100
- """Check if a file exists in the storage."""
117
+ def set_metadata(self, filename: str, key: str, value: Optional[BaseModel]):
118
+ """Set the metadata of a file given its filename.
119
+ filename (unicode): the filename of the file to set the metadata.
120
+ key (unicode): the key of the metadata to set.
121
+ value (BaseModel): the value of the metadata to set.
122
+ """
101
123
  pass
102
124
 
103
125
  @abstractmethod
104
- def describe(self, filename: str) -> str:
105
- """Return the description of a file given its filename.
106
- filename (unicode): the filename of the file to describe.
107
- return (unicode): the description of the file.
126
+ def get_metadata(
127
+ self, filename: str, key: str, model_cls: Type[BaseModel]
128
+ ) -> Optional[BaseModel]:
129
+ """Get the metadata of a file given its filename and key.
130
+ filename (unicode): the filename of the file to get the metadata.
131
+ key (unicode): the key of the metadata to get.
132
+ model_cls (Type[BaseModel]): the model class of the metadata.
133
+ return (BaseModel): the value of the metadata.
108
134
  raise (KeyError): if the file cannot be found.
109
135
  """
110
136
  pass
111
137
 
138
+ @abstractmethod
139
+ def list_metadata(self, filename: str) -> List[str]:
140
+ """List the metadata of a file given its filename.
141
+ filename (unicode): the filename of the file to list the metadata.
142
+ return (List[str]): the list of metadata keys.
143
+ """
144
+ pass
145
+
146
+ @abstractmethod
147
+ def exists(self, filename: str) -> bool:
148
+ """Check if a file exists in the storage."""
149
+ pass
150
+
112
151
  @abstractmethod
113
152
  def get_size(self, filename: str) -> int:
114
153
  """Return the size of a file given its filename.
@@ -127,7 +166,7 @@ class Storage(ABC):
127
166
  pass
128
167
 
129
168
  @abstractmethod
130
- def list(self) -> List[FileWithDescription]:
169
+ def list(self) -> List[FileWithMetadata]:
131
170
  """List the files available in the storage.
132
171
  return ([(unicode, unicode)]): a list of pairs, each
133
172
  representing a file in the form (filename, description).
@@ -138,6 +177,10 @@ class Storage(ABC):
138
177
  def path_for_symlink(self, filename: str) -> Optional[pathlib.Path]:
139
178
  pass
140
179
 
180
+ @abstractmethod
181
+ def filename_from_symlink(self, link: pathlib.Path) -> Optional[str]:
182
+ pass
183
+
141
184
 
142
185
  class NullStorage(Storage):
143
186
  """This backend is always empty, it just drops each file that
@@ -153,41 +196,54 @@ class NullStorage(Storage):
153
196
  def create_file(self, digest: str) -> Optional[PendingFile]:
154
197
  return None
155
198
 
156
- def commit_file(self, file: PendingFile, desc: str = '') -> bool:
199
+ def commit_file(
200
+ self, file: PendingFile, metadata: Optional[Dict[str, BaseModel]] = None
201
+ ) -> bool:
157
202
  return False
158
203
 
159
- def exists(self, filename: str) -> bool:
160
- return False
204
+ def set_metadata(self, filename: str, key: str, value: Optional[BaseModel]):
205
+ pass
161
206
 
162
- def describe(self, digest: str) -> str:
207
+ def get_metadata(
208
+ self, filename: str, key: str, model_cls: Type[BaseModel]
209
+ ) -> Optional[BaseModel]:
163
210
  raise KeyError('File not found.')
164
211
 
212
+ def list_metadata(self, filename: str) -> List[str]:
213
+ return []
214
+
215
+ def exists(self, filename: str) -> bool:
216
+ return False
217
+
165
218
  def get_size(self, digest: str) -> int:
166
219
  raise KeyError('File not found.')
167
220
 
168
221
  def delete(self, digest: str):
169
222
  pass
170
223
 
171
- def list(self) -> List[FileWithDescription]:
224
+ def list(self) -> List[FileWithMetadata]:
172
225
  return list()
173
226
 
174
227
  def path_for_symlink(self, digest: str) -> Optional[pathlib.Path]:
175
228
  return None
176
229
 
230
+ def filename_from_symlink(self, link: pathlib.Path) -> Optional[str]:
231
+ return None
232
+
177
233
 
178
234
  class FilesystemStorage(Storage):
179
235
  """This class implements a backend for FileCacher that keeps all
180
236
  the files in a file system directory, named after their filename.
181
237
  """
182
238
 
183
- def __init__(self, path: pathlib.Path):
239
+ def __init__(self, path: pathlib.Path, compress: bool = False):
184
240
  """Initialize the backend.
185
241
  path (string): the base path for the storage.
186
242
  """
187
243
  self.path = path
188
-
244
+ self.compress = compress
189
245
  # Create the directory if it doesn't exist
190
- path.mkdir(parents=True, exist_ok=True)
246
+ (path / '.metadata').mkdir(parents=True, exist_ok=True)
191
247
 
192
248
  def get_file(self, filename: str) -> IO[bytes]:
193
249
  """See FileCacherBackend.get_file()."""
@@ -196,6 +252,18 @@ class FilesystemStorage(Storage):
196
252
  if not file_path.is_file():
197
253
  raise KeyError('File not found.')
198
254
 
255
+ compression_metadata = self.get_metadata(
256
+ filename, 'compression', CompressionMetadata
257
+ )
258
+ if compression_metadata is not None:
259
+ return typing.cast(
260
+ IO[bytes],
261
+ lz4.frame.open(
262
+ file_path,
263
+ mode='rb',
264
+ compression_level=compression_metadata.compression_level,
265
+ ),
266
+ )
199
267
  return file_path.open('rb')
200
268
 
201
269
  def create_file(self, filename: str) -> Optional[PendingFile]:
@@ -211,13 +279,39 @@ class FilesystemStorage(Storage):
211
279
  temp_file = tempfile.NamedTemporaryFile(
212
280
  'wb', delete=False, prefix='.tmp.', suffix=filename, dir=self.path
213
281
  )
214
- return PendingFile(fd=temp_file, filename=filename)
215
-
216
- def commit_file(self, file: PendingFile, desc: str = '') -> bool:
282
+ metadata: Dict[str, Optional[BaseModel]] = {'compression': None}
283
+ if self.compress or grading_context.should_compress():
284
+ fd_name = temp_file.name
285
+ level = grading_context.get_compression_level()
286
+ temp_file = typing.cast(
287
+ IO[bytes],
288
+ lz4.frame.open(
289
+ temp_file,
290
+ mode='wb',
291
+ compression_level=level,
292
+ ),
293
+ )
294
+ temp_file.name = fd_name # type: ignore
295
+ metadata['compression'] = CompressionMetadata(compression_level=level)
296
+
297
+ return PendingFile(fd=temp_file, filename=filename, metadata=metadata)
298
+
299
+ def commit_file(
300
+ self, file: PendingFile, metadata: Optional[Dict[str, BaseModel]] = None
301
+ ) -> bool:
217
302
  """See FileCacherBackend.commit_file()."""
218
303
  file.fd.close()
219
304
 
220
305
  file_path: pathlib.Path = self.path / file.filename
306
+ file_path.parent.mkdir(parents=True, exist_ok=True)
307
+
308
+ for key, value in file.metadata.items():
309
+ self._set_metadata(file.filename, key, value)
310
+
311
+ if metadata is not None:
312
+ for key, value in metadata.items():
313
+ self._set_metadata(file.filename, key, value)
314
+
221
315
  # Move it into place in the cache. Skip if it already exists, and
222
316
  # delete the temporary file instead.
223
317
  if not file_path.is_file():
@@ -231,21 +325,43 @@ class FilesystemStorage(Storage):
231
325
  pathlib.PosixPath(file.fd.name).unlink()
232
326
  return False
233
327
 
328
+ def _get_metadata_path(self, filename: str, key: str) -> pathlib.Path:
329
+ return self.path / '.metadata' / f'{filename}__{key}.json'
330
+
331
+ def _set_metadata(self, filename: str, key: str, value: Optional[BaseModel]):
332
+ if value is None:
333
+ self._get_metadata_path(filename, key).unlink(missing_ok=True)
334
+ else:
335
+ metadata_path = self._get_metadata_path(filename, key)
336
+ metadata_path.parent.mkdir(parents=True, exist_ok=True)
337
+ metadata_path.write_text(value.model_dump_json())
338
+
339
+ def set_metadata(self, filename: str, key: str, value: Optional[BaseModel]):
340
+ if not self.exists(filename):
341
+ raise KeyError('File not found.')
342
+
343
+ self._set_metadata(filename, key, value)
344
+
345
+ def get_metadata(
346
+ self, filename: str, key: str, model_cls: Type[BaseModelT]
347
+ ) -> Optional[BaseModelT]:
348
+ path = self._get_metadata_path(filename, key)
349
+ if not path.is_file():
350
+ return None
351
+ return model_cls.model_validate_json(path.read_text())
352
+
353
+ def list_metadata(self, filename: str) -> List[str]:
354
+ return [
355
+ path.stem.split('__')[1]
356
+ for path in (self.path / '.metadata').glob(f'{filename}__*.json')
357
+ ]
358
+
234
359
  def exists(self, filename: str) -> bool:
235
360
  """See FileCacherBackend.exists()."""
236
361
  file_path: pathlib.Path = self.path / filename
237
362
 
238
363
  return file_path.is_file()
239
364
 
240
- def describe(self, filename: str) -> str:
241
- """See FileCacherBackend.describe()."""
242
- file_path: pathlib.Path = self.path / filename
243
-
244
- if not file_path.is_file():
245
- raise KeyError('File not found.')
246
-
247
- return ''
248
-
249
365
  def get_size(self, filename: str) -> int:
250
366
  """See FileCacherBackend.get_size()."""
251
367
  file_path: pathlib.Path = self.path / filename
@@ -260,15 +376,19 @@ class FilesystemStorage(Storage):
260
376
  file_path: pathlib.Path = self.path / filename
261
377
 
262
378
  file_path.unlink(missing_ok=True)
379
+ for key in self.list_metadata(filename):
380
+ self._get_metadata_path(filename, key).unlink(missing_ok=True)
263
381
 
264
- def list(self) -> List[FileWithDescription]:
382
+ def list(self) -> List[FileWithMetadata]:
265
383
  """See FileCacherBackend.list()."""
266
384
  res = []
267
385
  for path in self.path.glob('*'):
268
386
  if path.is_file():
387
+ filename = str(path.relative_to(self.path))
269
388
  res.append(
270
- FileWithDescription(
271
- filename=str(path.relative_to(self.path)), description=''
389
+ FileWithMetadata(
390
+ filename=filename,
391
+ metadata=self.list_metadata(filename),
272
392
  )
273
393
  )
274
394
  return res
@@ -277,4 +397,18 @@ class FilesystemStorage(Storage):
277
397
  file_path = self.path / filename
278
398
  if not file_path.is_file():
279
399
  raise KeyError('File not found.')
400
+
401
+ compression_metadata = self.get_metadata(
402
+ filename, 'compression', CompressionMetadata
403
+ )
404
+ if compression_metadata is not None:
405
+ return None
280
406
  return file_path
407
+
408
+ def filename_from_symlink(self, link: pathlib.Path) -> Optional[str]:
409
+ if not link.is_symlink():
410
+ return None
411
+ filename = link.readlink().resolve()
412
+ if not filename.is_file():
413
+ return None
414
+ return str(filename.relative_to(self.path))