omdev 0.0.0.dev210__py3-none-any.whl → 0.0.0.dev212__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- omdev/.manifests.json +15 -1
- omdev/__about__.py +0 -4
- omdev/amalg/gen.py +2 -3
- omdev/amalg/imports.py +4 -5
- omdev/amalg/manifests.py +7 -10
- omdev/amalg/resources.py +24 -27
- omdev/amalg/srcfiles.py +7 -10
- omdev/amalg/strip.py +4 -5
- omdev/amalg/types.py +1 -1
- omdev/amalg/typing.py +9 -8
- omdev/ci/cache.py +137 -10
- omdev/ci/ci.py +110 -75
- omdev/ci/cli.py +51 -11
- omdev/ci/compose.py +34 -15
- omdev/ci/{dockertars.py → docker.py} +43 -30
- omdev/ci/github/__init__.py +0 -0
- omdev/ci/github/bootstrap.py +11 -0
- omdev/ci/github/cache.py +355 -0
- omdev/ci/github/cacheapi.py +207 -0
- omdev/ci/github/cli.py +39 -0
- omdev/ci/requirements.py +3 -2
- omdev/ci/shell.py +42 -0
- omdev/ci/utils.py +49 -0
- omdev/scripts/ci.py +1734 -473
- omdev/scripts/interp.py +22 -22
- omdev/scripts/pyproject.py +22 -22
- omdev/tokens/__init__.py +0 -0
- omdev/tokens/all.py +35 -0
- omdev/tokens/tokenizert.py +217 -0
- omdev/{tokens.py → tokens/utils.py} +6 -12
- omdev/tools/mkenv.py +131 -0
- omdev/tools/mkrelimp.py +4 -6
- {omdev-0.0.0.dev210.dist-info → omdev-0.0.0.dev212.dist-info}/METADATA +2 -5
- {omdev-0.0.0.dev210.dist-info → omdev-0.0.0.dev212.dist-info}/RECORD +38 -28
- {omdev-0.0.0.dev210.dist-info → omdev-0.0.0.dev212.dist-info}/LICENSE +0 -0
- {omdev-0.0.0.dev210.dist-info → omdev-0.0.0.dev212.dist-info}/WHEEL +0 -0
- {omdev-0.0.0.dev210.dist-info → omdev-0.0.0.dev212.dist-info}/entry_points.txt +0 -0
- {omdev-0.0.0.dev210.dist-info → omdev-0.0.0.dev212.dist-info}/top_level.txt +0 -0
omdev/.manifests.json
CHANGED
```diff
@@ -326,11 +326,25 @@
             }
         }
     },
+    {
+        "module": ".tools.mkenv",
+        "attr": "_CLI_MODULE",
+        "file": "omdev/tools/mkenv.py",
+        "line": 123,
+        "value": {
+            "$.cli.types.CliModule": {
+                "cmd_name": [
+                    "mkenv"
+                ],
+                "mod_name": "omdev.tools.mkenv"
+            }
+        }
+    },
     {
         "module": ".tools.mkrelimp",
         "attr": "_CLI_MODULE",
         "file": "omdev/tools/mkrelimp.py",
-        "line":
+        "line": 146,
         "value": {
             "$.cli.types.CliModule": {
                 "cmd_name": "py/mkrelimp",
```
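The new entry registers `omdev.tools.mkenv` as a CLI module. For orientation, a hedged sketch (not taken from the diff) of what the manifest-bearing attribute in `omdev/tools/mkenv.py` presumably looks like; the manifest build records the attribute's module, file, and line number together with its serialized value:

```python
# Hypothetical reconstruction of the target attribute in omdev/tools/mkenv.py;
# the import path and constructor signature are assumptions inferred from the
# "$.cli.types.CliModule" key and the cmd_name/mod_name fields above.
from omdev.cli.types import CliModule

_CLI_MODULE = CliModule(
    cmd_name=['mkenv'],            # a list, matching the manifest's ["mkenv"]
    mod_name='omdev.tools.mkenv',  # the module the CLI dispatcher should import
)
```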
omdev/__about__.py
CHANGED
omdev/amalg/gen.py
CHANGED
```diff
@@ -8,11 +8,10 @@ from omlish import collections as col
 from omlish import lang
 from omlish.lite.runtime import LITE_REQUIRED_PYTHON_VERSION
 
-from .. import tokens as tks
+from ..tokens import all as tks
 from .srcfiles import SrcFile
 from .srcfiles import make_src_file
 from .strip import strip_main_lines
-from .types import Tokens
 from .typing import Typing
 
 
@@ -71,7 +70,7 @@ class AmalgGenerator:
         return self._src_files()[self._main_path]
 
     @cached.function
-    def _header_lines(self) -> list[
+    def _header_lines(self) -> list[str]:
         header_lines = []
 
         if self._main_file().header_lines:
```
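This release replaces the old split between the `tokens` helper module and the `amalg.types.Tokens` alias with a single `omdev.tokens.all` namespace, imported everywhere as `tks`. A minimal sketch of the resulting usage pattern, assuming `omdev.tokens.all` re-exports the `Tokens` alias and the `Token` type seen throughout this diff:

```python
# Illustrative only; the helper below is made up, but the tks.Tokens annotation
# and the Token(name=..., src=...) shape match what the diffs in this release use.
from omdev.tokens import all as tks


def name_srcs(line: tks.Tokens) -> list[str]:
    # A Tokens value is a sequence of Token objects; each has .name and .src.
    return [t.src for t in line if t.name == 'NAME']
```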
omdev/amalg/imports.py
CHANGED
```diff
@@ -4,8 +4,7 @@ import typing as ta
 
 from omlish import check
 
-from .. import tokens as tks
-from .types import Tokens
+from ..tokens import all as tks
 
 
 ##
@@ -22,11 +21,11 @@ class Import:
 
     mod_path: str | None
 
-    toks: Tokens = dc.field(repr=False)
+    toks: tks.Tokens = dc.field(repr=False)
 
 
 def make_import(
-        lts: Tokens,
+        lts: tks.Tokens,
         *,
         src_path: str,
         mounts: ta.Mapping[str, str],
@@ -90,7 +89,7 @@ def make_import(
         as_=as_,
 
         src_path=src_path,
-        line=ft.line,
+        line=check.not_none(ft.line),
 
         mod_path=mod_path,
 
```
omdev/amalg/manifests.py
CHANGED
```diff
@@ -1,17 +1,14 @@
 import ast
 
-import tokenize_rt as trt
-
 from omlish import check
 
-from .. import tokens as tks
-from .types import Tokens
+from ..tokens import all as tks
 
 
 ##
 
 
-def is_manifest_comment(line: Tokens) -> bool:
+def is_manifest_comment(line: tks.Tokens) -> bool:
     if not line:
         return False
 
@@ -22,10 +19,10 @@ def is_manifest_comment(line: Tokens) -> bool:
 
 
 def comment_out_manifest_comment(
-        line: Tokens,
-        cls: list[Tokens],
+        line: tks.Tokens,
+        cls: list[tks.Tokens],
         i: int,
-) -> tuple[list[Tokens], int]:
+) -> tuple[list[tks.Tokens], int]:
     mls = [line]
     while True:
         mls.append(cls[i])
@@ -41,8 +38,8 @@ def comment_out_manifest_comment(
         check.isinstance(check.single(mmod.body), ast.Assign)
         break
 
-    out: list[Tokens] = [
-        [
+    out: list[tks.Tokens] = [
+        [tks.Token('COMMENT', '# ' + tks.join_toks(ml))]
         for ml in mls
     ]
 
```
omdev/amalg/resources.py
CHANGED
```diff
@@ -4,10 +4,7 @@ import itertools
 import os.path
 import typing as ta
 
-import tokenize_rt as trt
-
-from .. import tokens as tks
-from .types import Tokens
+from ..tokens import all as tks
 
 
 ##
@@ -19,7 +16,7 @@ class RootLevelResourcesRead(ta.NamedTuple):
     resource: str
 
 
-def is_root_level_resources_read(lts: Tokens) -> RootLevelResourcesRead | None:
+def is_root_level_resources_read(lts: tks.Tokens) -> RootLevelResourcesRead | None:
     wts = list(tks.ignore_ws(lts, keep=['INDENT']))
 
     if not tks.match_toks(wts, [
@@ -47,36 +44,36 @@ def is_root_level_resources_read(lts: Tokens) -> RootLevelResourcesRead | None:
 def build_resource_lines(
         rsrc: RootLevelResourcesRead,
         path: str,
-) -> list[Tokens]:
+) -> list[tks.Tokens]:
     rf = os.path.join(os.path.dirname(path), rsrc.resource)
 
     if rsrc.kind == 'binary':
         with open(rf, 'rb') as bf:
             rb = bf.read()  # noqa
 
-        out: list[Tokens] = [[
-            ...
+        out: list[tks.Tokens] = [[
+            tks.Token(name='NAME', src=rsrc.variable),
+            tks.Token(name='UNIMPORTANT_WS', src=' '),
+            tks.Token(name='OP', src='='),
+            tks.Token(name='UNIMPORTANT_WS', src=' '),
+            tks.Token(name='NAME', src='base64'),
+            tks.Token(name='OP', src='.'),
+            tks.Token(name='NAME', src='b64decode'),
+            tks.Token(name='OP', src='('),
+            tks.Token(name='NL', src='\n'),
         ]]
 
         rb64 = base64.b64encode(rb).decode('ascii')
         for chunk in itertools.batched(rb64, 96):
             out.append([
-                ...
+                tks.Token(name='UNIMPORTANT_WS', src=' '),
+                tks.Token(name='STRING', src=f"'{''.join(chunk)}'"),
+                tks.Token(name='NL', src='\n'),
             ])
 
         out.append([
-            ...
+            tks.Token(name='OP', src=')'),
+            tks.Token(name='NEWLINE', src='\n'),
         ])
 
         return out
@@ -87,12 +84,12 @@ def build_resource_lines(
         rt = rt.replace('\\', '\\\\')  # Escape backslashes
         rt = rt.replace('"""', r'\"\"\"')
         return [[
-            ...
+            tks.Token(name='NAME', src=rsrc.variable),
+            tks.Token(name='UNIMPORTANT_WS', src=' '),
+            tks.Token(name='OP', src='='),
+            tks.Token(name='UNIMPORTANT_WS', src=' '),
+            tks.Token(name='STRING', src=f'"""\\\n{rt}"""  # noqa\n'),
+            tks.Token(name='NEWLINE', src=''),
         ]]
 
     else:
```
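For binary resources, `build_resource_lines` now emits its token lines via `tks.Token`, producing an assignment whose value is a `base64.b64decode(...)` call with the payload broken into chunks of at most 96 characters. Roughly what the generated amalgamated source looks like once those token lines are joined (the variable name and payload below are made up):

```python
import base64

# Sketch of the emitted output, not code taken from the package: a NAME/OP
# header line, one quoted chunk per line (joined by implicit string
# concatenation), then the closing parenthesis on its own line.
_SOME_RESOURCE = base64.b64decode(
    'aGVsbG8g'   # real output packs up to 96 base64 characters per line
    'd29ybGQ='
)
```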
omdev/amalg/srcfiles.py
CHANGED
```diff
@@ -1,12 +1,10 @@
 import dataclasses as dc
 import typing as ta
 
-import tokenize_rt as trt
-
 from omlish import collections as col
 from omlish import lang
 
-from .. import tokens as tks
+from ..tokens import all as tks
 from .imports import Import
 from .imports import make_import
 from .manifests import comment_out_manifest_comment
@@ -15,7 +13,6 @@ from .resources import build_resource_lines
 from .resources import is_root_level_resources_read
 from .strip import split_header_lines
 from .strip import strip_header_lines
-from .types import Tokens
 from .typing import Typing
 from .typing import is_root_level_if_type_checking_block
 from .typing import make_typing
@@ -30,13 +27,13 @@ class SrcFile:
     path: str
 
     src: str = dc.field(repr=False)
-    tokens: Tokens = dc.field(repr=False)
-    lines: ta.Sequence[Tokens] = dc.field(repr=False)
+    tokens: tks.Tokens = dc.field(repr=False)
+    lines: ta.Sequence[tks.Tokens] = dc.field(repr=False)
 
-    header_lines: ta.Sequence[Tokens] = dc.field(repr=False)
+    header_lines: ta.Sequence[tks.Tokens] = dc.field(repr=False)
     imports: ta.Sequence[Import] = dc.field(repr=False)
     typings: ta.Sequence[Typing] = dc.field(repr=False)
-    content_lines: ta.Sequence[Tokens] = dc.field(repr=False)
+    content_lines: ta.Sequence[tks.Tokens] = dc.field(repr=False)
 
     ruff_noqa: ta.AbstractSet[str] = dc.field(repr=False)
 
@@ -51,7 +48,7 @@ def make_src_file(
     with open(path) as f:
         src = f.read().strip()
 
-    tokens = trt.src_to_tokens(src)
+    tokens = tks.src_to_tokens(src)
     lines = tks.split_lines(tokens)
 
     header_lines, cls = split_header_lines(lines)
@@ -61,7 +58,7 @@ def make_src_file(
 
     imps: list[Import] = []
     tys: list[Typing] = []
-    ctls: list[Tokens] = []
+    ctls: list[tks.Tokens] = []
 
     has_binary_resources = False
 
```
omdev/amalg/strip.py
CHANGED
```diff
@@ -2,8 +2,7 @@ import re
 import typing as ta
 
 from .. import magic
-from .. import tokens as tks
-from .types import Tokens
+from ..tokens import all as tks
 
 
 ##
@@ -12,7 +11,7 @@ from .types import Tokens
 HEADER_NAMES = (*tks.WS_NAMES, 'COMMENT', 'STRING')
 
 
-def split_header_lines(lines: ta.Iterable[Tokens]) -> tuple[list[Tokens], list[Tokens]]:
+def split_header_lines(lines: ta.Iterable[tks.Tokens]) -> tuple[list[tks.Tokens], list[tks.Tokens]]:
     ws = []
     nws = []
     for line in (it := iter(lines)):
@@ -31,7 +30,7 @@ def split_header_lines(lines: ta.Iterable[Tokens]) -> tuple[list[Tokens], list[Tokens]]:
 IF_MAIN_PAT = re.compile(r'if\s+__name__\s+==\s+[\'"]__main__[\'"]\s*:')
 
 
-def strip_main_lines(cls: ta.Sequence[Tokens]) -> list[Tokens]:
+def strip_main_lines(cls: ta.Sequence[tks.Tokens]) -> list[tks.Tokens]:
     out = []
 
     for l in (it := iter(cls)):
@@ -59,7 +58,7 @@ STRIPPED_HEADER_PAT = magic.compile_magic_style_pat(
 )
 
 
-def strip_header_lines(hls: ta.Sequence[Tokens]) -> list[Tokens]:
+def strip_header_lines(hls: ta.Sequence[tks.Tokens]) -> list[tks.Tokens]:
     if hls and tks.join_toks(hls[0]).startswith('#!'):
         hls = hls[1:]
     out = []
```
omdev/amalg/types.py
CHANGED
omdev/amalg/typing.py
CHANGED
```diff
@@ -1,7 +1,8 @@
 import dataclasses as dc
 
-from .. import tokens as tks
-from .types import Tokens
+from omlish import check
+
+from ..tokens import all as tks
 
 
 ##
@@ -18,11 +19,11 @@ class Typing:
     src_path: str
     line: int
 
-    toks: Tokens = dc.field(repr=False)
+    toks: tks.Tokens = dc.field(repr=False)
 
 
 def _is_typing(
-        lts: Tokens,
+        lts: tks.Tokens,
         *,
         exclude_newtypes: bool = False,
 ) -> bool:
@@ -48,7 +49,7 @@ def _is_typing(
 
 
 def make_typing(
-        lts: Tokens,
+        lts: tks.Tokens,
         *,
         src_path: str,
 ) -> Typing | None:
@@ -63,7 +64,7 @@ def make_typing(
         src=tks.join_toks(lts),
 
         src_path=src_path,
-        line=ft.line,
+        line=check.not_none(ft.line),
 
         toks=lts,
     )
@@ -72,7 +73,7 @@ def make_typing(
 ##
 
 
-def is_root_level_if_type_checking_block(lts: Tokens) -> bool:
+def is_root_level_if_type_checking_block(lts: tks.Tokens) -> bool:
     return tks.match_toks(tks.ignore_ws(lts, keep=['INDENT']), [
         ('NAME', 'if'),
         ('NAME', 'ta'),
@@ -83,7 +84,7 @@ def is_root_level_if_type_checking_block(lts: Tokens) -> bool:
 
 
 def skip_root_level_if_type_checking_block(
-        cls: list[Tokens],
+        cls: list[tks.Tokens],
         i: int,
 ) -> int:
     def skip_block():
```
omdev/ci/cache.py
CHANGED
```diff
@@ -2,21 +2,24 @@
 # @omlish-lite
 import abc
 import os.path
+import shlex
 import shutil
 import typing as ta
 
+from .shell import ShellCmd
 
-
+
+##
 
 
 class FileCache(abc.ABC):
     @abc.abstractmethod
-    def get_file(self,
+    def get_file(self, key: str) -> ta.Optional[str]:
         raise NotImplementedError
 
     @abc.abstractmethod
-    def put_file(self,
+    def put_file(self, key: str, file_path: str) -> ta.Optional[str]:
         raise NotImplementedError
 
 
@@ -29,13 +32,137 @@ class DirectoryFileCache(FileCache):
 
         self._dir = dir
 
-        ...
+    #
+
+    def get_cache_file_path(
+            self,
+            key: str,
+            *,
+            make_dirs: bool = False,
+    ) -> str:
+        if make_dirs:
+            os.makedirs(self._dir, exist_ok=True)
+        return os.path.join(self._dir, key)
+
+    def format_incomplete_file(self, f: str) -> str:
+        return os.path.join(os.path.dirname(f), f'_{os.path.basename(f)}.incomplete')
+
+    #
+
+    def get_file(self, key: str) -> ta.Optional[str]:
+        cache_file_path = self.get_cache_file_path(key)
+        if not os.path.exists(cache_file_path):
             return None
-        return
+        return cache_file_path
 
-    def put_file(self, file_path: str) -> None:
-        ...
-        cache_file_path = os.path.join(self._dir, os.path.basename(file_path))
+    def put_file(self, key: str, file_path: str) -> None:
+        cache_file_path = self.get_cache_file_path(key, make_dirs=True)
         shutil.copyfile(file_path, cache_file_path)
+
+
+##
+
+
+class ShellCache(abc.ABC):
+    @abc.abstractmethod
+    def get_file_cmd(self, key: str) -> ta.Optional[ShellCmd]:
+        raise NotImplementedError
+
+    class PutFileCmdContext(abc.ABC):
+        def __init__(self) -> None:
+            super().__init__()
+
+            self._state: ta.Literal['open', 'committed', 'aborted'] = 'open'
+
+        @property
+        def state(self) -> ta.Literal['open', 'committed', 'aborted']:
+            return self._state
+
+        #
+
+        @property
+        @abc.abstractmethod
+        def cmd(self) -> ShellCmd:
+            raise NotImplementedError
+
+        #
+
+        def __enter__(self):
+            return self
+
+        def __exit__(self, exc_type, exc_val, exc_tb):
+            if exc_val is None:
+                self.commit()
+            else:
+                self.abort()
+
+        #
+
+        @abc.abstractmethod
+        def _commit(self) -> None:
+            raise NotImplementedError
+
+        def commit(self) -> None:
+            if self._state == 'committed':
+                return
+            elif self._state == 'open':
+                self._commit()
+                self._state = 'committed'
+            else:
+                raise RuntimeError(self._state)
+
+        #
+
+        @abc.abstractmethod
+        def _abort(self) -> None:
+            raise NotImplementedError
+
+        def abort(self) -> None:
+            if self._state == 'aborted':
+                return
+            elif self._state == 'open':
+                self._abort()
+                self._state = 'committed'
+            else:
+                raise RuntimeError(self._state)
+
+    @abc.abstractmethod
+    def put_file_cmd(self, key: str) -> PutFileCmdContext:
+        raise NotImplementedError
+
+
+#
+
+
+class DirectoryShellCache(ShellCache):
+    def __init__(self, dfc: DirectoryFileCache) -> None:
+        super().__init__()
+
+        self._dfc = dfc
+
+    def get_file_cmd(self, key: str) -> ta.Optional[ShellCmd]:
+        f = self._dfc.get_file(key)
+        if f is None:
+            return None
+        return ShellCmd(f'cat {shlex.quote(f)}')
+
+    class _PutFileCmdContext(ShellCache.PutFileCmdContext):  # noqa
+        def __init__(self, tf: str, f: str) -> None:
+            super().__init__()
+
+            self._tf = tf
+            self._f = f
+
+        @property
+        def cmd(self) -> ShellCmd:
+            return ShellCmd(f'cat > {shlex.quote(self._tf)}')
+
+        def _commit(self) -> None:
+            os.replace(self._tf, self._f)
+
+        def _abort(self) -> None:
+            os.unlink(self._tf)
+
+    def put_file_cmd(self, key: str) -> ShellCache.PutFileCmdContext:
+        f = self._dfc.get_cache_file_path(key, make_dirs=True)
+        return self._PutFileCmdContext(self._dfc.format_incomplete_file(f), f)
```