omdev 0.0.0.dev222__py3-none-any.whl → 0.0.0.dev224__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- omdev/ci/cache.py +148 -23
- omdev/ci/ci.py +50 -110
- omdev/ci/cli.py +24 -23
- omdev/ci/docker/__init__.py +0 -0
- omdev/ci/docker/buildcaching.py +69 -0
- omdev/ci/docker/cache.py +57 -0
- omdev/ci/docker/cacheserved.py +262 -0
- omdev/ci/{docker.py → docker/cmds.py} +1 -44
- omdev/ci/docker/dataserver.py +204 -0
- omdev/ci/docker/imagepulling.py +65 -0
- omdev/ci/docker/inject.py +37 -0
- omdev/ci/docker/packing.py +72 -0
- omdev/ci/docker/repositories.py +40 -0
- omdev/ci/docker/utils.py +48 -0
- omdev/ci/github/cache.py +35 -6
- omdev/ci/github/client.py +9 -2
- omdev/ci/github/inject.py +30 -0
- omdev/ci/inject.py +61 -0
- omdev/ci/utils.py +0 -49
- omdev/dataserver/__init__.py +1 -0
- omdev/dataserver/handlers.py +198 -0
- omdev/dataserver/http.py +69 -0
- omdev/dataserver/routes.py +49 -0
- omdev/dataserver/server.py +90 -0
- omdev/dataserver/targets.py +121 -0
- omdev/oci/building.py +107 -9
- omdev/oci/compression.py +8 -0
- omdev/oci/data.py +43 -0
- omdev/oci/datarefs.py +90 -50
- omdev/oci/dataserver.py +64 -0
- omdev/oci/loading.py +20 -0
- omdev/oci/media.py +20 -0
- omdev/oci/pack/__init__.py +0 -0
- omdev/oci/pack/packing.py +185 -0
- omdev/oci/pack/repositories.py +162 -0
- omdev/oci/pack/unpacking.py +204 -0
- omdev/oci/repositories.py +84 -2
- omdev/oci/tars.py +144 -0
- omdev/pyproject/resources/python.sh +1 -1
- omdev/scripts/ci.py +2137 -512
- omdev/scripts/interp.py +119 -22
- omdev/scripts/pyproject.py +141 -28
- {omdev-0.0.0.dev222.dist-info → omdev-0.0.0.dev224.dist-info}/METADATA +2 -2
- {omdev-0.0.0.dev222.dist-info → omdev-0.0.0.dev224.dist-info}/RECORD +48 -23
- {omdev-0.0.0.dev222.dist-info → omdev-0.0.0.dev224.dist-info}/LICENSE +0 -0
- {omdev-0.0.0.dev222.dist-info → omdev-0.0.0.dev224.dist-info}/WHEEL +0 -0
- {omdev-0.0.0.dev222.dist-info → omdev-0.0.0.dev224.dist-info}/entry_points.txt +0 -0
- {omdev-0.0.0.dev222.dist-info → omdev-0.0.0.dev224.dist-info}/top_level.txt +0 -0
omdev/ci/docker/packing.py
ADDED
@@ -0,0 +1,72 @@
+# ruff: noqa: UP006 UP007
+import asyncio
+import os.path
+import shlex
+import typing as ta
+
+from omlish.asyncs.asyncio.subprocesses import asyncio_subprocesses
+from omlish.lite.cached import async_cached_nullary
+from omlish.lite.cached import cached_nullary
+from omlish.lite.contextmanagers import ExitStacked
+from omlish.logs.timing import log_timing_context
+from omlish.os.temp import temp_dir_context
+
+from ...oci.building import BuiltOciImageIndexRepository
+from ...oci.pack.repositories import OciPackedRepositoryBuilder
+from ...oci.repositories import DirectoryOciRepository
+
+
+##
+
+
+class PackedDockerImageIndexRepositoryBuilder(ExitStacked):
+    def __init__(
+            self,
+            *,
+            image_id: str,
+
+            temp_dir: ta.Optional[str] = None,
+    ) -> None:
+        super().__init__()
+
+        self._image_id = image_id
+
+        self._given_temp_dir = temp_dir
+
+    @cached_nullary
+    def _temp_dir(self) -> str:
+        if (given := self._given_temp_dir) is not None:
+            return given
+        else:
+            return self._enter_context(temp_dir_context())  # noqa
+
+    #
+
+    @async_cached_nullary
+    async def _save_to_dir(self) -> str:
+        save_dir = os.path.join(self._temp_dir(), 'built-image')
+        os.mkdir(save_dir)
+
+        with log_timing_context(f'Saving docker image {self._image_id}'):
+            await asyncio_subprocesses.check_call(
+                ' | '.join([
+                    f'docker save {shlex.quote(self._image_id)}',
+                    f'tar x -C {shlex.quote(save_dir)}',
+                ]),
+                shell=True,
+            )
+
+        return save_dir
+
+    #
+
+    @async_cached_nullary
+    async def build(self) -> BuiltOciImageIndexRepository:
+        saved_dir = await self._save_to_dir()
+
+        with OciPackedRepositoryBuilder(
+                DirectoryOciRepository(saved_dir),
+
+                temp_dir=self._temp_dir(),
+        ) as prb:
+            return await asyncio.get_running_loop().run_in_executor(None, prb.build)
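The new packing module drives `docker save` and repacks the extracted output into an OCI image index repository. A minimal usage sketch, not part of the diff — it assumes `ExitStacked` gives the builder ordinary `with` semantics and that the image tag below exists locally:

```python
# Illustrative sketch only; 'my-image:latest' is a placeholder.
import asyncio

from omdev.ci.docker.packing import PackedDockerImageIndexRepositoryBuilder


async def demo() -> None:
    with PackedDockerImageIndexRepositoryBuilder(
            image_id='my-image:latest',
    ) as builder:
        built = await builder.build()  # a BuiltOciImageIndexRepository
        print(built)


asyncio.run(demo())
```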
omdev/ci/docker/repositories.py
ADDED
@@ -0,0 +1,40 @@
+# ruff: noqa: UP006 UP007
+import abc
+import contextlib
+import shlex
+import typing as ta
+
+from omlish.asyncs.asyncio.subprocesses import asyncio_subprocesses
+from omlish.lite.timing import log_timing_context
+from omlish.os.temp import temp_dir_context
+
+from ...oci.repositories import DirectoryOciRepository
+from ...oci.repositories import OciRepository
+
+
+##
+
+
+class DockerImageRepositoryOpener(abc.ABC):
+    @abc.abstractmethod
+    def open_docker_image_repository(self, image: str) -> ta.AsyncContextManager[OciRepository]:
+        raise NotImplementedError
+
+
+#
+
+
+class DockerImageRepositoryOpenerImpl(DockerImageRepositoryOpener):
+    @contextlib.asynccontextmanager
+    async def open_docker_image_repository(self, image: str) -> ta.AsyncGenerator[OciRepository, None]:
+        with temp_dir_context() as save_dir:
+            with log_timing_context(f'Saving docker image {image}'):
+                await asyncio_subprocesses.check_call(
+                    ' | '.join([
+                        f'docker save {shlex.quote(image)}',
+                        f'tar x -C {shlex.quote(save_dir)}',
+                    ]),
+                    shell=True,
+                )
+
+            yield DirectoryOciRepository(save_dir)
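The repositories module wraps the same `docker save` flow as an async context manager yielding an `OciRepository`. A usage sketch (the image name is a placeholder):

```python
# Illustrative sketch only.
import asyncio

from omdev.ci.docker.repositories import DockerImageRepositoryOpenerImpl


async def demo() -> None:
    opener = DockerImageRepositoryOpenerImpl()
    async with opener.open_docker_image_repository('alpine:latest') as repo:
        # repo is a DirectoryOciRepository rooted at the extracted `docker save` output
        print(repo)


asyncio.run(demo())
```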
omdev/ci/docker/utils.py
ADDED
@@ -0,0 +1,48 @@
+# ruff: noqa: UP006 UP007
+"""
+TODO:
+ - some less stupid Dockerfile hash
+  - doesn't change too much though
+"""
+import contextlib
+import json
+import tarfile
+
+from omlish.lite.check import check
+
+from ..utils import sha256_str
+
+
+##
+
+
+def build_docker_file_hash(docker_file: str) -> str:
+    with open(docker_file) as f:
+        contents = f.read()
+
+    return sha256_str(contents)
+
+
+##
+
+
+def read_docker_tar_image_tag(tar_file: str) -> str:
+    with tarfile.open(tar_file) as tf:
+        with contextlib.closing(check.not_none(tf.extractfile('manifest.json'))) as mf:
+            m = mf.read()
+
+    manifests = json.loads(m.decode('utf-8'))
+    manifest = check.single(manifests)
+    tag = check.non_empty_str(check.single(manifest['RepoTags']))
+    return tag
+
+
+def read_docker_tar_image_id(tar_file: str) -> str:
+    with tarfile.open(tar_file) as tf:
+        with contextlib.closing(check.not_none(tf.extractfile('index.json'))) as mf:
+            i = mf.read()
+
+    index = json.loads(i.decode('utf-8'))
+    manifest = check.single(index['manifests'])
+    image_id = check.non_empty_str(manifest['digest'])
+    return image_id
omdev/ci/github/cache.py
CHANGED
@@ -1,12 +1,16 @@
 # ruff: noqa: UP006 UP007
+import dataclasses as dc
 import os.path
 import typing as ta

 from omlish.lite.check import check
 from omlish.os.files import unlinking_if_exists

+from ..cache import CacheVersion
+from ..cache import DataCache
 from ..cache import DirectoryFileCache
 from ..cache import FileCache
+from ..cache import FileCacheDataCache
 from .client import GithubCacheClient
 from .client import GithubCacheServiceV1Client

@@ -14,17 +18,23 @@ from .client import GithubCacheServiceV1Client
 ##


-class GithubFileCache(FileCache):
+class GithubCache(FileCache, DataCache):
+    @dc.dataclass(frozen=True)
+    class Config:
+        dir: str
+
     def __init__(
             self,
-
+            config: Config,
             *,
             client: ta.Optional[GithubCacheClient] = None,
-
+            version: ta.Optional[CacheVersion] = None,
     ) -> None:
-        super().__init__(
+        super().__init__(
+            version=version,
+        )

-        self.
+        self._config = config

         if client is None:
             client = GithubCacheServiceV1Client(
@@ -33,10 +43,14 @@ class GithubFileCache(FileCache):
         self._client: GithubCacheClient = client

         self._local = DirectoryFileCache(
-
+            DirectoryFileCache.Config(
+                dir=check.non_empty_str(config.dir),
+            ),
             version=self._version,
         )

+    #
+
     async def get_file(self, key: str) -> ta.Optional[str]:
         local_file = self._local.get_cache_file_path(key)
         if os.path.exists(local_file):
@@ -69,3 +83,18 @@ class GithubFileCache(FileCache):
         await self._client.upload_file(key, cache_file_path)

         return cache_file_path
+
+    #
+
+    async def get_data(self, key: str) -> ta.Optional[DataCache.Data]:
+        local_file = self._local.get_cache_file_path(key)
+        if os.path.exists(local_file):
+            return DataCache.FileData(local_file)
+
+        if (entry := await self._client.get_entry(key)) is None:
+            return None
+
+        return DataCache.UrlData(check.non_empty_str(self._client.get_entry_url(entry)))
+
+    async def put_data(self, key: str, data: DataCache.Data) -> None:
+        await FileCacheDataCache(self).put_data(key, data)
omdev/ci/github/client.py
CHANGED
@@ -13,9 +13,9 @@ from omlish.asyncs.asyncio.asyncio import asyncio_wait_concurrent
 from omlish.lite.check import check
 from omlish.lite.json import json_dumps_compact
 from omlish.lite.logs import log
+from omlish.lite.timing import log_timing_context

 from ..consts import CI_CACHE_VERSION
-from ..utils import log_timing_context
 from .api import GithubCacheServiceV1
 from .env import register_github_env_var

@@ -31,6 +31,9 @@ class GithubCacheClient(abc.ABC):
     def get_entry(self, key: str) -> ta.Awaitable[ta.Optional[Entry]]:
         raise NotImplementedError

+    def get_entry_url(self, entry: Entry) -> ta.Optional[str]:
+        return None
+
     @abc.abstractmethod
     def download_file(self, entry: Entry, out_file: str) -> ta.Awaitable[None]:
         raise NotImplementedError
@@ -97,7 +100,7 @@ class GithubCacheServiceV1BaseClient(GithubCacheClient, abc.ABC):
     def _get_loop(self) -> asyncio.AbstractEventLoop:
         if (loop := self._given_loop) is not None:
             return loop
-        return asyncio.
+        return asyncio.get_running_loop()

     #

@@ -225,6 +228,10 @@ class GithubCacheServiceV1BaseClient(GithubCacheClient, abc.ABC):
     class Entry(GithubCacheClient.Entry):
         artifact: GithubCacheServiceV1.ArtifactCacheEntry

+    def get_entry_url(self, entry: GithubCacheClient.Entry) -> ta.Optional[str]:
+        entry1 = check.isinstance(entry, self.Entry)
+        return entry1.artifact.cache_key
+
     #

     def build_get_entry_url_path(self, *keys: str) -> str:
omdev/ci/github/inject.py
ADDED
@@ -0,0 +1,30 @@
+# ruff: noqa: UP006 UP007
+import typing as ta
+
+from omlish.lite.inject import InjectorBindingOrBindings
+from omlish.lite.inject import InjectorBindings
+from omlish.lite.inject import inj
+
+from ..cache import FileCache
+from .cache import GithubCache
+
+
+##
+
+
+def bind_github(
+        *,
+        cache_dir: ta.Optional[str] = None,
+) -> InjectorBindings:
+    lst: ta.List[InjectorBindingOrBindings] = []
+
+    if cache_dir is not None:
+        lst.extend([
+            inj.bind(GithubCache.Config(
+                dir=cache_dir,
+            )),
+            inj.bind(GithubCache, singleton=True),
+            inj.bind(FileCache, to_key=GithubCache),
+        ])
+
+    return inj.as_bindings(*lst)
omdev/ci/inject.py
ADDED
@@ -0,0 +1,61 @@
+# ruff: noqa: UP006 UP007
+import typing as ta
+
+from omlish.lite.inject import InjectorBindingOrBindings
+from omlish.lite.inject import InjectorBindings
+from omlish.lite.inject import inj
+
+from .cache import DirectoryFileCache
+from .cache import FileCache
+from .ci import Ci
+from .docker.buildcaching import DockerBuildCachingImpl
+from .docker.imagepulling import DockerImagePullingImpl
+from .docker.inject import bind_docker
+from .github.inject import bind_github
+
+
+##
+
+
+def bind_ci(
+        *,
+        config: Ci.Config,
+
+        github: bool = False,
+
+        cache_dir: ta.Optional[str] = None,
+) -> InjectorBindings:
+    lst: ta.List[InjectorBindingOrBindings] = [  # noqa
+        inj.bind(config),
+        inj.bind(Ci, singleton=True),
+    ]
+
+    lst.append(bind_docker(
+        build_caching_config=DockerBuildCachingImpl.Config(
+            service=config.service,
+
+            always_build=config.always_build,
+        ),
+
+        image_pulling_config=DockerImagePullingImpl.Config(
+            always_pull=config.always_pull,
+        ),
+    ))
+
+    if cache_dir is not None:
+        if github:
+            lst.append(bind_github(
+                cache_dir=cache_dir,
+            ))
+
+        else:
+            lst.extend([
+                inj.bind(DirectoryFileCache.Config(
+                    dir=cache_dir,
+                )),
+                inj.bind(DirectoryFileCache, singleton=True),
+                inj.bind(FileCache, to_key=DirectoryFileCache),
+
+            ])
+
+    return inj.as_bindings(*lst)
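How these bindings might be consumed — a sketch under assumptions: `Ci.Config` construction is elided, and it assumes `omlish.lite.inject` exposes a `create_injector` helper and item access on the resulting injector (neither is shown in this diff):

```python
# Hypothetical wiring sketch; the injector helpers used here are assumptions.
from omlish.lite.inject import inj

from omdev.ci.ci import Ci
from omdev.ci.inject import bind_ci


def make_ci(config: 'Ci.Config') -> Ci:
    bindings = bind_ci(
        config=config,
        github=False,           # fall back to DirectoryFileCache rather than GithubCache
        cache_dir='.cache/ci',  # placeholder path
    )
    return inj.create_injector(bindings)[Ci]  # assumed injector API
```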
omdev/ci/utils.py
CHANGED
@@ -1,11 +1,7 @@
 # ruff: noqa: UP006 UP007
 import hashlib
-import logging
-import time
 import typing as ta

-from omlish.lite.logs import log
-

 ##

@@ -22,48 +18,3 @@ def read_yaml_file(yaml_file: str) -> ta.Any:

 def sha256_str(s: str) -> str:
     return hashlib.sha256(s.encode('utf-8')).hexdigest()
-
-
-##
-
-
-class LogTimingContext:
-    DEFAULT_LOG: ta.ClassVar[logging.Logger] = log
-
-    def __init__(
-            self,
-            description: str,
-            *,
-            log: ta.Optional[logging.Logger] = None,  # noqa
-            level: int = logging.DEBUG,
-    ) -> None:
-        super().__init__()
-
-        self._description = description
-        self._log = log if log is not None else self.DEFAULT_LOG
-        self._level = level
-
-    def set_description(self, description: str) -> 'LogTimingContext':
-        self._description = description
-        return self
-
-    _begin_time: float
-    _end_time: float
-
-    def __enter__(self) -> 'LogTimingContext':
-        self._begin_time = time.time()
-
-        self._log.log(self._level, f'Begin : {self._description}')  # noqa
-
-        return self
-
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        self._end_time = time.time()
-
-        self._log.log(
-            self._level,
-            f'End : {self._description} - {self._end_time - self._begin_time:0.2f} s elapsed',
-        )
-
-
-log_timing_context = LogTimingContext
omdev/dataserver/__init__.py
ADDED
@@ -0,0 +1 @@
+# @omlish-lite
omdev/dataserver/handlers.py
ADDED
@@ -0,0 +1,198 @@
+# ruff: noqa: UP006 UP007
+import abc
+import dataclasses as dc
+import http.client
+import io
+import os
+import typing as ta
+import urllib.request
+
+from omlish.lite.check import check
+
+from .targets import BytesDataServerTarget
+from .targets import DataServerTarget
+from .targets import FileDataServerTarget
+from .targets import UrlDataServerTarget
+
+
+DataServerTargetT = ta.TypeVar('DataServerTargetT', bound='DataServerTarget')
+
+
+##
+
+
+@dc.dataclass(frozen=True)
+class DataServerRequest:
+    method: str
+    path: str
+
+
+@dc.dataclass(frozen=True)
+class DataServerResponse:
+    status: int
+    headers: ta.Optional[ta.Mapping[str, str]] = None
+    body: ta.Optional[io.IOBase] = None
+
+    #
+
+    def close(self) -> None:
+        if (body := self.body) is not None:
+            body.close()
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self.close()
+
+
+class DataServerError(Exception):
+    pass
+
+
+class DataServerHandler(abc.ABC):
+    @abc.abstractmethod
+    def handle(self, req: DataServerRequest) -> DataServerResponse:
+        raise NotImplementedError
+
+
+##
+
+
+class DataServerTargetHandler(DataServerHandler, abc.ABC, ta.Generic[DataServerTargetT]):
+    def __init__(self, target: DataServerTargetT) -> None:
+        super().__init__()
+
+        self._target = target
+
+    #
+
+    @classmethod
+    def for_target(cls, tgt: DataServerTarget, **kwargs: ta.Any) -> 'DataServerTargetHandler':
+        try:
+            hc = _DATA_SERVER_TARGET_HANDLERS[type(tgt)]
+        except KeyError:
+            raise TypeError(tgt)  # noqa
+        else:
+            return hc(tgt, **kwargs)
+
+    #
+
+    def _make_headers(self) -> ta.Dict[str, str]:
+        dct = {}
+        if (ct := self._target.content_type) is not None:
+            dct['Content-Type'] = ct
+        if (cl := self._target.content_length) is not None:
+            dct['Content-Length'] = str(cl)
+        return dct
+
+
+#
+
+
+_DATA_SERVER_TARGET_HANDLERS: ta.Dict[ta.Type[DataServerTarget], ta.Type[DataServerTargetHandler]] = {}
+
+
+def _register_data_server_target_handler(*tcs):
+    def inner(hc):
+        check.issubclass(hc, DataServerTargetHandler)
+        for tc in tcs:
+            check.issubclass(tc, DataServerTarget)
+            check.not_in(tc, _DATA_SERVER_TARGET_HANDLERS)
+            _DATA_SERVER_TARGET_HANDLERS[tc] = hc
+        return hc
+    return inner
+
+
+#
+
+
+@_register_data_server_target_handler(BytesDataServerTarget)
+class BytesDataServerTargetHandler(DataServerTargetHandler[BytesDataServerTarget]):
+    def _make_headers(self) -> ta.Dict[str, str]:
+        dct = super()._make_headers()
+        if 'Content-Length' not in dct and self._target.data is not None:
+            dct['Content-Length'] = str(len(self._target.data))
+        return dct
+
+    def handle(self, req: DataServerRequest) -> DataServerResponse:
+        if req.method not in ('GET', 'HEAD'):
+            return DataServerResponse(http.HTTPStatus.METHOD_NOT_ALLOWED)
+
+        return DataServerResponse(
+            http.HTTPStatus.OK,
+            headers=self._make_headers(),
+            body=io.BytesIO(self._target.data) if self._target.data is not None and req.method == 'GET' else None,
+        )
+
+
+#
+
+
+@_register_data_server_target_handler(FileDataServerTarget)
+class FileDataServerTargetHandler(DataServerTargetHandler[FileDataServerTarget]):
+    def handle(self, req: DataServerRequest) -> DataServerResponse:
+        if req.method == 'HEAD':
+            try:
+                st = os.stat(check.not_none(self._target.file_path))
+            except FileNotFoundError:
+                return DataServerResponse(http.HTTPStatus.NOT_FOUND)
+
+            return DataServerResponse(
+                http.HTTPStatus.OK,
+                headers={
+                    'Content-Length': str(st.st_size),
+                    **self._make_headers(),
+                },
+            )
+
+        elif req.method == 'GET':
+            try:
+                f = open(check.not_none(self._target.file_path), 'rb')  # noqa
+            except FileNotFoundError:
+                return DataServerResponse(http.HTTPStatus.NOT_FOUND)
+
+            try:
+                sz = os.fstat(f.fileno())
+
+                return DataServerResponse(
+                    http.HTTPStatus.OK,
+                    headers={
+                        'Content-Length': str(sz.st_size),
+                        **self._make_headers(),
+                    },
+                    body=f,  # noqa
+                )
+
+            except Exception:  # noqa
+                f.close()
+                raise
+
+        else:
+            return DataServerResponse(http.HTTPStatus.METHOD_NOT_ALLOWED)
+
+
+#
+
+
+@_register_data_server_target_handler(UrlDataServerTarget)
+class UrlDataServerTargetHandler(DataServerTargetHandler[UrlDataServerTarget]):
+    def handle(self, req: DataServerRequest) -> DataServerResponse:
+        if req.method not in check.not_none(self._target.methods):
+            return DataServerResponse(http.HTTPStatus.METHOD_NOT_ALLOWED)
+
+        resp: http.client.HTTPResponse = urllib.request.urlopen(urllib.request.Request(  # noqa
+            method=req.method,
+            url=check.not_none(self._target.url),
+        ))
+
+        try:
+            return DataServerResponse(
+                resp.status,
+                headers=dict(resp.headers.items()),
+                body=resp,
+            )
+
+        except Exception:  # noqa
+            resp.close()
+            raise
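A sketch of the registry dispatch above. `BytesDataServerTarget` lives in `targets.py` (not shown in this hunk), so its constructor is assumed here to accept the fields the handlers read (`data`, `content_type`):

```python
# Illustrative sketch; the BytesDataServerTarget constructor is an assumption.
from omdev.dataserver.handlers import DataServerRequest
from omdev.dataserver.handlers import DataServerTargetHandler
from omdev.dataserver.targets import BytesDataServerTarget

tgt = BytesDataServerTarget(data=b'hello', content_type='text/plain')
handler = DataServerTargetHandler.for_target(tgt)  # resolves BytesDataServerTargetHandler

with handler.handle(DataServerRequest('GET', '/hello')) as resp:
    print(resp.status)           # HTTPStatus.OK
    print(resp.headers)          # Content-Type / Content-Length
    if resp.body is not None:
        print(resp.body.read())  # b'hello'
```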
omdev/dataserver/http.py
ADDED
@@ -0,0 +1,69 @@
+# ruff: noqa: UP006 UP007
+"""
+TODO:
+ - asyncio
+ - chunked transfer - both output and urllib input
+"""
+import typing as ta
+
+from omlish.http.handlers import HttpHandler_
+from omlish.http.handlers import HttpHandlerRequest
+from omlish.http.handlers import HttpHandlerResponse
+from omlish.http.handlers import HttpHandlerResponseStreamedData
+
+from .handlers import DataServerRequest
+from .server import DataServer
+
+
+##
+
+
+class DataServerHttpHandler(HttpHandler_):
+    DEFAULT_READ_CHUNK_SIZE = 0x10000
+
+    def __init__(
+            self,
+            ps: DataServer,
+            *,
+            read_chunk_size: int = DEFAULT_READ_CHUNK_SIZE,
+    ) -> None:
+        super().__init__()
+
+        self._ps = ps
+        self._read_chunk_size = read_chunk_size
+
+    def __call__(self, req: HttpHandlerRequest) -> HttpHandlerResponse:
+        p_req = DataServerRequest(
+            req.method,
+            req.path,
+        )
+
+        p_resp = self._ps.handle(p_req)
+        try:
+            data: ta.Any
+            if (p_body := p_resp.body) is not None:
+                def stream_data():
+                    try:
+                        while (b := p_body.read(self._read_chunk_size)):
+                            yield b
+                    finally:
+                        p_body.close()
+
+                data = HttpHandlerResponseStreamedData(stream_data())
+
+            else:
+                data = None
+
+            resp = HttpHandlerResponse(
+                status=p_resp.status,
+                headers=p_resp.headers,
+                data=data,
+                close_connection=True,
+            )
+
+            return resp
+
+        except Exception:  # noqa
+            p_resp.close()
+
+            raise