omlish 0.0.0.dev39__py3-none-any.whl → 0.0.0.dev41__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
omlish/__about__.py CHANGED
@@ -1,5 +1,5 @@
- __version__ = '0.0.0.dev39'
- __revision__ = 'a18b6ac9698db2d5d932d6055a4828fd9f6c5848-dirty'
+ __version__ = '0.0.0.dev41'
+ __revision__ = '9666ba3a1c40b95074dfcacd1c0c87ca5e267d6f'


  #
@@ -40,7 +40,8 @@ class Project(ProjectBase):
  ],

  'compress': [
- 'lz4 ~= 4.0',
+ 'lz4 ~= 4.3',
+ # 'lz4 @ git+https://github.com/wrmsr/python-lz4@wrmsr_20240830_GIL_NOT_USED'

  'python-snappy ~= 0.7; python_version < "3.13"',

@@ -83,7 +84,7 @@ class Project(ProjectBase):
  'sqlalchemy[asyncio] ~= 2.0',
  ],

- 'sqlalchemy-drivers': [
+ 'sqldrivers': [
  'pg8000 ~= 1.31',
  # 'psycopg2 ~= 2.9',
  # 'psycopg ~= 3.2',
@@ -96,6 +97,8 @@ class Project(ProjectBase):
  'aiosqlite ~= 0.20',
  'asyncpg ~= 0.29; python_version < "3.13"',

+ 'apsw ~= 3.46',
+
  'sqlean.py ~= 3.45; python_version < "3.13"',

  'duckdb ~= 1.1',
omlish/argparse.py CHANGED
@@ -73,7 +73,7 @@ class Command:

  def __post_init__(self) -> None:
  check.isinstance(self.name, str)
- check.not_in('-', self.name)
+ check.not_in('_', self.name)
  check.not_empty(self.name)

  check.callable(self.fn)
@@ -103,7 +103,7 @@ def command(

  def inner(fn):
  return Command(
- (name if name is not None else fn.__name__).replace('-', '_'),
+ (name if name is not None else fn.__name__).replace('_', '-'),
  fn,
  args,
  parent=parent,
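The two argparse.py changes above flip the naming convention: underscores are now rejected in a Command's name, and the command() decorator derives dash-separated command names from underscore-separated function names. A minimal standalone sketch of the new rule (illustration only, not code from the package):

def _command_name(explicit: str | None, fn_name: str) -> str:
    # Mirrors the decorator above: prefer the explicit name, else the function
    # name, then normalize underscores to dashes.
    name = (explicit if explicit is not None else fn_name).replace('_', '-')
    # Mirrors Command.__post_init__: underscores are no longer allowed.
    assert name and '_' not in name
    return name

assert _command_name(None, 'foo_bar') == 'foo-bar'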
omlish/bootstrap/base.py CHANGED
@@ -2,7 +2,7 @@ import abc
  import dataclasses as dc
  import typing as ta

- from omlish import lang
+ from .. import lang


  BootstrapConfigT = ta.TypeVar('BootstrapConfigT', bound='Bootstrap.Config')
omlish/c3.py CHANGED
@@ -43,7 +43,8 @@ T = ta.TypeVar('T')


  def merge(seqs: ta.MutableSequence[list[T]]) -> list[T]:
- """Merges MROs in *sequences* to a single MRO using the C3 algorithm.
+ """
+ Merges MROs in *sequences* to a single MRO using the C3 algorithm.

  Adapted from https://www.python.org/download/releases/2.3/mro/.
  """
@@ -78,7 +79,8 @@ def mro(
  get_bases: ta.Callable[[T], ta.Sequence[T]] = operator.attrgetter('__bases__'),
  is_subclass: ta.Callable[[T, T], bool] = issubclass, # type: ignore
  ) -> list[T]:
- """Computes the method resolution order using extended C3 linearization.
+ """
+ Computes the method resolution order using extended C3 linearization.

  If no *abcs* are given, the algorithm works exactly like the built-in C3 linearization used for method resolution.

@@ -128,7 +130,8 @@ def compose_mro(
  is_subclass: ta.Callable[[T, T], bool] = issubclass, # type: ignore
  get_subclasses: ta.Callable[[T], ta.Iterable[T]] = operator.methodcaller('__subclasses__'),
  ) -> list[T]:
- """Calculates the method resolution order for a given class *cls*.
+ """
+ Calculates the method resolution order for a given class *cls*.

  Includes relevant abstract base classes (with their respective bases) from the *types* list. Uses a modified C3
  linearization algorithm.
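The c3.py hunks above are docstring reflows only; merge() is still the classic C3 merge adapted from the python.org MRO recipe. A small usage sketch, assuming the signature shown in this hunk:

from omlish import c3

class A: pass
class B(A): pass
class C(A): pass
class D(B, C): pass

# L[D] = D + merge(L[B], L[C], [B, C]) -- the standard C3 recipe:
seqs = [[D], list(B.__mro__), list(C.__mro__), [B, C]]
assert c3.merge(seqs) == list(D.__mro__)  # [D, B, C, A, object]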
omlish/check.py CHANGED
@@ -44,41 +44,49 @@ def unregister_on_raise(fn: OnRaiseFn) -> None:
  #


- _render_args: ta.Callable[..., str | None] | None = None
+ _ARGS_RENDERER: ta.Callable[..., str | None] | None = None


- def enable_args_rendering() -> bool:
- global _render_args
- if _render_args is not None:
+ def _try_enable_args_rendering() -> bool:
+ global _ARGS_RENDERER
+ if _ARGS_RENDERER is not None:
  return True

- with _CONFIG_LOCK:
- if _render_args is not None:
- return True # type: ignore
+ try:
+ from .diag.asts import ArgsRenderer

- try:
- from .diag.asts import ArgsRenderer
+ ArgsRenderer.smoketest()

- ArgsRenderer.smoketest()
+ except Exception: # noqa
+ return False

- except Exception: # noqa
- return False
+ def _real_render_args(fmt: str, *args: ta.Any) -> str | None:
+ ra = ArgsRenderer(back=3).render_args(*args)
+ if ra is None:
+ return None

- def _real_render_args(fmt: str, *args: ta.Any) -> str | None:
- ra = ArgsRenderer(back=3).render_args(*args)
- if ra is None:
- return None
+ return fmt % tuple(str(a) for a in ra)

- return fmt % tuple(str(a) for a in ra)
+ _ARGS_RENDERER = _real_render_args
+ return True

- _render_args = _real_render_args
- return True

+ _TRIED_ENABLED_ARGS_RENDERING: bool | None = None

- enable_args_rendering()

+ def try_enable_args_rendering() -> bool:
+ global _TRIED_ENABLED_ARGS_RENDERING
+ if _TRIED_ENABLED_ARGS_RENDERING is not None:
+ return _TRIED_ENABLED_ARGS_RENDERING

- #
+ with _CONFIG_LOCK:
+ if _TRIED_ENABLED_ARGS_RENDERING is None:
+ _TRIED_ENABLED_ARGS_RENDERING = _try_enable_args_rendering()
+
+ return _TRIED_ENABLED_ARGS_RENDERING
+
+
+ ##


  def _default_exception_factory(exc_cls: type[Exception], *args, **kwargs) -> Exception:
@@ -111,8 +119,8 @@ def _raise(
  if message is None:
  message = default_message

- if render_fmt is not None and _render_args is not None:
- rendered_args = _render_args(render_fmt, *ak.args)
+ if render_fmt is not None and _ARGS_RENDERER is not None:
+ rendered_args = _ARGS_RENDERER(render_fmt, *ak.args)
  if rendered_args is not None:
  message = f'{message} : {rendered_args}'

@@ -124,6 +132,8 @@ def _raise(
  **ak.kwargs,
  )

+ try_enable_args_rendering()
+
  for fn in _ON_RAISE:
  fn(exc)

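The net effect of this check.py rework: argument rendering is no longer probed eagerly at import time; the first raised check failure calls try_enable_args_rendering(), which memoizes the probe result under _CONFIG_LOCK. A generic sketch of that memoization pattern (not the package's code):

import threading

_LOCK = threading.Lock()
_TRIED: bool | None = None  # None means "not probed yet"

def _probe() -> bool:
    # Stand-in for importing and smoke-testing an optional renderer.
    try:
        import ast  # noqa  placeholder for a possibly-unavailable dependency
    except Exception:  # noqa
        return False
    return True

def try_enable() -> bool:
    global _TRIED
    if _TRIED is not None:  # fast path, no lock
        return _TRIED
    with _LOCK:
        if _TRIED is None:  # double-checked under the lock
            _TRIED = _probe()
    return _TRIED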
@@ -7,7 +7,7 @@ import abc
  import dataclasses as dc
  import typing as ta

- from omlish import lang
+ from .. import lang


  if ta.TYPE_CHECKING:
omlish/diag/pycharm.py CHANGED
@@ -9,9 +9,9 @@ from .. import lang


  if ta.TYPE_CHECKING:
- docker = lang.proxy_import('omlish.docker')
+ docker = lang.proxy_import('..docker')
  else:
- from omlish import docker
+ from .. import docker


  ##
@@ -0,0 +1,27 @@
+ from .cli import ( # noqa
+ Inspect,
+ Port,
+ PsItem,
+ cli_inspect,
+ cli_ps,
+ has_cli,
+ parse_port,
+ )
+
+ from .compose import ( # noqa
+ ComposeConfig,
+ get_compose_port,
+ )
+
+ from .helpers import ( # noqa
+ DOCKER_FOR_MAC_HOSTNAME,
+ DOCKER_HOST_PLATFORM_KEY,
+ get_docker_host_platform,
+ is_likely_in_docker,
+ timebomb_payload,
+ )
+
+ from .hub import ( # noqa
+ HubRepoInfo,
+ get_hub_repo_info,
+ )
omlish/docker/cli.py ADDED
@@ -0,0 +1,101 @@
+ """
+ TODO:
+ - https://github.com/mag37/dockcheck/blob/3d122f2b868eb53a25a3014f0f6bd499390a3a29/dockcheck.sh
+ - https://github.com/regclient/regclient
+ - https://stackoverflow.com/questions/71409458/how-to-download-docker-image-using-http-api-using-docker-hub-credentials
+ - https://stackoverflow.com/questions/55386202/how-can-i-use-the-docker-registry-api-to-pull-information-about-a-container-get
+ - https://ops.tips/blog/inspecting-docker-image-without-pull/
+ """ # noqa
+ import datetime
+ import re
+ import subprocess
+ import typing as ta
+
+ from .. import check
+ from .. import dataclasses as dc
+ from .. import lang
+ from .. import marshal as msh
+ from ..formats import json
+
+
+ @dc.dataclass(frozen=True)
+ @msh.update_object_metadata(field_naming=msh.Naming.CAMEL, unknown_field='x')
+ @msh.update_fields_metadata(['id'], name='ID')
+ class PsItem(lang.Final):
+ command: str
+ created_at: datetime.datetime
+ id: str
+ image: str
+ labels: str
+ local_volumes: str
+ mounts: str
+ names: str
+ networks: str
+ ports: str
+ running_for: str
+ size: str
+ state: str
+ status: str
+
+ x: ta.Mapping[str, ta.Any] | None = None
+
+
+ class Port(ta.NamedTuple):
+ ip: str
+ from_port: int
+ to_port: int
+ proto: str
+
+
+ _PORT_PAT = re.compile(r'(?P<ip>[^:]+):(?P<from_port>\d+)->(?P<to_port>\d+)/(?P<proto>\w+)')
+
+
+ def parse_port(s: str) -> Port:
+ # '0.0.0.0:35221->22/tcp, 0.0.0.0:35220->8000/tcp'
+ m = check.not_none(_PORT_PAT.fullmatch(s))
+ return Port(
+ m.group('ip'),
+ int(m.group('from_port')),
+ int(m.group('to_port')),
+ m.group('proto'),
+ )
+
+
+ def cli_ps() -> list[PsItem]:
+ o = subprocess.check_output([
+ 'docker',
+ 'ps',
+ '--no-trunc',
+ '--format', '{{json .}}',
+ ])
+
+ ret: list[PsItem] = []
+ for l in o.decode().splitlines():
+ d = json.loads(l)
+ pi = msh.unmarshal(d, PsItem)
+ ret.append(pi)
+
+ return ret
+
+
+ @dc.dataclass(frozen=True)
+ @msh.update_object_metadata(field_naming=msh.Naming.CAMEL, unknown_field='x')
+ class Inspect(lang.Final):
+ id: str
+ created: datetime.datetime
+
+ x: ta.Mapping[str, ta.Any] | None = None
+
+
+ def cli_inspect(ids: list[str]) -> list[Inspect]:
+ o = subprocess.check_output(['docker', 'inspect', *ids])
+ return msh.unmarshal(json.loads(o.decode()), list[Inspect])
+
+
+ def has_cli() -> bool:
+ try:
+ proc = subprocess.run(['docker', '--version']) # noqa
+ except (FileNotFoundError, subprocess.CalledProcessError):
+ return False
+ else:
+ return not proc.returncode
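parse_port above handles one segment of the `docker ps` Ports column (the inline comment shows the raw, comma-separated form). A rough usage sketch, assuming these helpers are re-exported from the docker subpackage as shown earlier in this diff:

from omlish import docker

raw = '0.0.0.0:35221->22/tcp, 0.0.0.0:35220->8000/tcp'
ports = [docker.parse_port(seg.strip()) for seg in raw.split(',')]
# -> [Port(ip='0.0.0.0', from_port=35221, to_port=22, proto='tcp'),
#     Port(ip='0.0.0.0', from_port=35220, to_port=8000, proto='tcp')]

# cli_ps() and cli_inspect() shell out to the docker CLI, so guard with has_cli():
if docker.has_cli():
    for item in docker.cli_ps():
        print(item.names, item.ports)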
@@ -0,0 +1,51 @@
+ """
+ TODO:
+ - merged compose configs: https://github.com/wrmsr/bane/blob/27647abdcfb323b73e6982a5c318c7029496b203/core/dev/docker/compose.go#L38
+ """ # noqa
+ import typing as ta
+
+ from .. import check
+ from .. import lang
+
+
+ if ta.TYPE_CHECKING:
+ import yaml
+ else:
+ yaml = lang.proxy_import('yaml')
+
+
+ class ComposeConfig:
+ def __init__(
+ self,
+ prefix: str,
+ *,
+ file_path: str | None = None,
+ ) -> None:
+ super().__init__()
+
+ self._prefix = prefix
+ self._file_path = file_path
+
+ @lang.cached_function
+ def get_config(self) -> ta.Mapping[str, ta.Any]:
+ with open(check.not_none(self._file_path)) as f:
+ buf = f.read()
+ return yaml.safe_load(buf)
+
+ @lang.cached_function
+ def get_services(self) -> ta.Mapping[str, ta.Any]:
+ ret = {}
+ for n, c in self.get_config()['services'].items():
+ check.state(n.startswith(self._prefix))
+ ret[n[len(self._prefix):]] = c
+
+ return ret
+
+
+ def get_compose_port(cfg: ta.Mapping[str, ta.Any], default: int) -> int:
+ return check.single(
+ int(l)
+ for p in cfg['ports']
+ for l, r in [p.split(':')]
+ if int(r) == default
+ )
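get_compose_port picks the host-side port that a service maps onto a known container port. A small illustration, assuming a service config using the 'host:container' string form the code expects (the service dict here is hypothetical):

from omlish import docker

svc = {'ports': ['35220:8000', '35221:22']}  # hypothetical compose service entry
assert docker.get_compose_port(svc, 8000) == 35220  # host port mapped to container port 8000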
@@ -0,0 +1,48 @@
+ import os
+ import re
+ import shlex
+ import sys
+
+
+ ##
+
+
+ _DEFAULT_TIMEBOMB_NAME = '-'.join([*__name__.split('.'), 'timebomb'])
+
+
+ def timebomb_payload(delay_s: float, name: str = _DEFAULT_TIMEBOMB_NAME) -> str:
+ return (
+ '('
+ f'echo {shlex.quote(name)} && '
+ f'sleep {delay_s:g} && '
+ 'sh -c \'killall5 -9 -o $PPID -o $$ ; kill 1\''
+ ') &'
+ )
+
+
+ ##
+
+
+ DOCKER_FOR_MAC_HOSTNAME = 'docker.for.mac.localhost'
+
+
+ _LIKELY_IN_DOCKER_PATTERN = re.compile(r'^overlay / .*/docker/')
+
+
+ def is_likely_in_docker() -> bool:
+ if getattr(sys, 'platform') != 'linux':
+ return False
+ with open('/proc/mounts') as f:
+ ls = f.readlines()
+ return any(_LIKELY_IN_DOCKER_PATTERN.match(l) for l in ls)
+
+
+ ##
+
+
+ # Set by pyproject, docker-dev script
+ DOCKER_HOST_PLATFORM_KEY = 'DOCKER_HOST_PLATFORM'
+
+
+ def get_docker_host_platform() -> str | None:
+ return os.environ.get(DOCKER_HOST_PLATFORM_KEY)
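timebomb_payload builds a background shell one-liner meant to be appended to a container's command so the container self-destructs after a delay. A quick sketch of what it emits (the name argument here is made up):

from omlish import docker

payload = docker.timebomb_payload(3600.0, name='example-timebomb')
print(payload)
# (echo example-timebomb && sleep 3600 && sh -c 'killall5 -9 -o $PPID -o $$ ; kill 1') &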
omlish/docker/hub.py ADDED
@@ -0,0 +1,75 @@
+ import typing as ta
+ import urllib.request
+
+ from .. import dataclasses as dc
+ from ..formats import json
+
+
+ @dc.dataclass(frozen=True)
+ class HubRepoInfo:
+ repo: str
+ tags: ta.Mapping[str, ta.Any]
+ latest_manifests: ta.Mapping[str, ta.Any]
+
+
+ def get_hub_repo_info(
+ repo: str,
+ *,
+ auth_url: str = 'https://auth.docker.io/',
+ api_url: str = 'https://registry-1.docker.io/v2/',
+ ) -> HubRepoInfo:
+ """
+ https://stackoverflow.com/a/39376254
+
+ ==
+
+ repo=library/nginx
+ token=$(
+ curl -s "https://auth.docker.io/token?service=registry.docker.io&scope=repository:${repo}:pull" \
+ | jq -r '.token' \
+ )
+ curl -H "Authorization: Bearer $token" -s "https://registry-1.docker.io/v2/${repo}/tags/list" | jq
+ curl \
+ -H "Accept: application/vnd.docker.distribution.manifest.v2+json" \
+ -H "Accept: application/vnd.docker.distribution.manifest.list.v2+json" \
+ -H "Authorization: Bearer $token" \
+ -s "https://registry-1.docker.io/v2/${repo}/manifests/latest" \
+ | jq .
+ """
+
+ auth_url = auth_url.rstrip('/')
+ api_url = api_url.rstrip('/')
+
+ #
+
+ def req_json(url: str, **kwargs: ta.Any) -> ta.Any:
+ with urllib.request.urlopen(urllib.request.Request(url, **kwargs)) as resp: # noqa
+ return json.loads(resp.read().decode('utf-8'))
+
+ #
+
+ token_dct = req_json(f'{auth_url}/token?service=registry.docker.io&scope=repository:{repo}:pull')
+ token = token_dct['token']
+
+ req_hdrs = {'Authorization': f'Bearer {token}'}
+
+ #
+
+ tags_dct = req_json(
+ f'{api_url}/{repo}/tags/list',
+ headers=req_hdrs,
+ )
+
+ latest_mani_dct = req_json(
+ f'{api_url}/{repo}/manifests/latest',
+ headers={
+ **req_hdrs,
+ 'Accept': 'application/vnd.docker.distribution.manifest.v2+json',
+ },
+ )
+
+ return HubRepoInfo(
+ repo,
+ tags_dct,
+ latest_mani_dct,
+ )
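get_hub_repo_info wraps the token/tags/manifest dance from the docstring's curl example into one call. A usage sketch (requires network access to Docker Hub; the response shapes assumed in the comments follow the Registry V2 API):

from omlish import docker

info = docker.get_hub_repo_info('library/nginx')
print(info.repo)                               # 'library/nginx'
print(sorted(info.tags.get('tags', []))[:3])   # e.g. a few of the repo's tag names
print(info.latest_manifests.get('mediaType'))  # media type of the 'latest' manifest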
@@ -0,0 +1,166 @@
+ """
+ https://github.com/openshift/docker-distribution/blob/master/docs/spec/manifest-v2-2.md
+ """
+ import typing as ta
+
+ from .. import dataclasses as dc
+ from .. import lang
+ from .. import marshal as msh
+
+
+ SCHEMA_VERSION = 2
+
+
+ class MediaTypes(lang.Namespace):
+ # schema1 (existing manifest format)
+ MANIFEST_V1 = 'application/vnd.docker.distribution.manifest.v1+json'
+
+ # New image manifest format (schemaVersion = 2)
+ MANIFEST_V2 = 'application/vnd.docker.distribution.manifest.v2+json'
+
+ # Manifest list, aka "fat manifest"
+ MANIFEST_LIST = 'application/vnd.docker.distribution.manifest.list.v2+json'
+
+ # Container config JSON
+ CONTAINER_CONFIG = 'application/vnd.docker.container.image.v1+json'
+
+ # "Layer", as a gzipped tar
+ LAYER = 'application/vnd.docker.image.rootfs.diff.tar.gzip'
+
+ # "Layer", as a gzipped tar that should never be pushed
+ LAYER_NEVER_PUSH = 'application/vnd.docker.image.rootfs.foreign.diff.tar.gzip'
+
+ # Plugin config JSON
+ PLUGIN_CONFIG = 'application/vnd.docker.plugin.v1+json'
+
+
+ @dc.dataclass(frozen=True, kw_only=True)
+ @msh.update_object_metadata(field_naming=msh.Naming.LOW_CAMEL, unknown_field='x')
+ @msh.update_fields_metadata(['os_version'], name='os.version')
+ @msh.update_fields_metadata(['os_features'], name='os.features')
+ class Platform(lang.Final):
+ # The architecture field specifies the CPU architecture, for example amd64 or ppc64le.
+ architecture: str
+
+ # The os field specifies the operating system, for example linux or windows.
+ os: str
+
+ # The optional os.version field specifies the operating system version, for example 10.0.10586.
+ os_version: str | None = None
+
+ # The optional os.features field specifies an array of strings, each listing a required OS feature (for example on
+ # Windows win32k).
+ os_features: ta.Sequence[ta.Any] | None = None
+
+ # The optional variant field specifies a variant of the CPU, for example v6 to specify a particular CPU variant of
+ # the ARM CPU.
+ variant: str | None = None
+
+ # The optional features field specifies an array of strings, each listing a required CPU feature (for example sse4
+ # or aes).
+ features: ta.Sequence[ta.Any] | None = None
+
+ x: ta.Mapping[str, ta.Any] | None = None
+
+
+ @dc.dataclass(frozen=True)
+ @msh.update_object_metadata(field_naming=msh.Naming.LOW_CAMEL, unknown_field='x')
+ class Manifest(lang.Final):
+ # The MIME type of the referenced object. This will generally be
+ # application/vnd.docker.distribution.manifest.v2+json, but it could also be
+ # application/vnd.docker.distribution.manifest.v1+json if the manifest list references a legacy schema-1 manifest.
+ media_type: str
+
+ # The size in bytes of the object. This field exists so that a client will have an expected size for the content
+ # before validating. If the length of the retrieved content does not match the specified length, the content should
+ # not be trusted.
+ size: int
+
+ # The digest of the content, as defined by the Registry V2 HTTP API Specification.
+ digest: str
+
+ # The platform object describes the platform which the image in the manifest runs on. A full list of valid operating
+ # system and architecture values are listed in the Go language documentation for $GOOS and $GOARCH
+ platform: Platform | None = None
+
+ x: ta.Mapping[str, ta.Any] | None = None
+
+
+ @dc.dataclass(frozen=True)
+ @msh.update_object_metadata(field_naming=msh.Naming.LOW_CAMEL, unknown_field='x')
+ class ManifestList(lang.Final):
+ # This field specifies the image manifest schema version as an integer. This schema uses the version 2.
+ schema_version: int
+ dc.validate(lambda self: self.schema_version == SCHEMA_VERSION)
+
+ # The MIME type of the manifest list. This should be set to
+ # application/vnd.docker.distribution.manifest.list.v2+json.
+ media_type: str
+ dc.validate(lambda self: self.media_type == MediaTypes.MANIFEST_LIST)
+
+ # The manifests field contains a list of manifests for specific platforms.
+ manifests: ta.Sequence[Manifest]
+
+ x: ta.Mapping[str, ta.Any] | None = None
+
+
+ @dc.dataclass(frozen=True)
+ @msh.update_object_metadata(field_naming=msh.Naming.LOW_CAMEL, unknown_field='x')
+ class ImageManifest(lang.Final):
+ # This field specifies the image manifest schema version as an integer. This schema uses version 2.
+ schema_version: int
+ dc.validate(lambda self: self.schema_version == SCHEMA_VERSION)
+
+ # The MIME type of the manifest. This should be set to application/vnd.docker.distribution.manifest.v2+json.
+ media_type: str
+ dc.validate(lambda self: self.media_type == MediaTypes.MANIFEST_V2)
+
+ @dc.dataclass(frozen=True)
+ @msh.update_object_metadata(field_naming=msh.Naming.LOW_CAMEL, unknown_field='x')
+ class Config(lang.Final):
+ # The MIME type of the referenced object. This should generally be
+ # application/vnd.docker.container.image.v1+json.
+ media_type: str
+
+ # The size in bytes of the object. This field exists so that a client will have an expected size for the content
+ # before validating. If the length of the retrieved content does not match the specified length, the content
+ # should not be trusted.
+ size: int
+
+ # The digest of the content, as defined by the Registry V2 HTTP API Specification.
+ digest: str
+
+ x: ta.Mapping[str, ta.Any] | None = None
+
+ # The config field references a configuration object for a container, by digest. This configuration item is a JSON
+ # blob that the runtime uses to set up the container. This new schema uses a tweaked version of this configuration
+ # o allow image content-addressability on the daemon side.
+ config: Config | None = None
+
+ @dc.dataclass(frozen=True)
+ @msh.update_object_metadata(field_naming=msh.Naming.LOW_CAMEL, unknown_field='x')
+ class Layer(lang.Final):
+ # The MIME type of the referenced object. This should generally be
+ # application/vnd.docker.image.rootfs.diff.tar.gzip. Layers of type
+ # application/vnd.docker.image.rootfs.foreign.diff.tar.gzip may be pulled from a remote location but they should
+ # never be pushed.
+ media_type: str
+
+ # The size in bytes of the object. This field exists so that a client will have an expected size for the content
+ # before validating. If the length of the retrieved content does not match the specified length, the content
+ # should not be trusted.
+ size: int
+
+ # The digest of the content, as defined by the Registry V2 HTTP API Specification.
+ digest: str
+
+ # Provides a list of URLs from which the content may be fetched. Content must be verified against the digest and
+ # size. This field is optional and uncommon.
+ urls: ta.Sequence[str] | None = None
+
+ x: ta.Mapping[str, ta.Any] | None = None
+
+ # The layer list is ordered starting from the base image (opposite order of schema1).
+ layers: ta.Sequence[Layer] | None = None
+
+ x: ta.Mapping[str, ta.Any] | None = None
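These dataclasses mirror the Docker image manifest v2.2 spec and carry the same marshal metadata (lowCamel field naming, unknown fields collected into x) used elsewhere in this diff, so a registry response can be unmarshaled directly. A sketch; the module path and the sample document below are assumptions, not taken from this diff:

from omlish import marshal as msh
from omlish.docker import manifests as dm  # assumed module path

# Minimal manifest-list document, shaped per the v2.2 spec:
doc = {
    'schemaVersion': 2,
    'mediaType': dm.MediaTypes.MANIFEST_LIST,
    'manifests': [
        {
            'mediaType': dm.MediaTypes.MANIFEST_V2,
            'size': 7143,
            'digest': 'sha256:' + '0' * 64,  # placeholder digest
            'platform': {'architecture': 'ppc64le', 'os': 'linux'},
        },
    ],
}

ml = msh.unmarshal(doc, dm.ManifestList)
print(ml.manifests[0].platform.architecture)  # 'ppc64le'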
omlish/lang/__init__.py CHANGED
@@ -192,6 +192,7 @@ from .strings import ( # noqa
  is_sunder,
  prefix_delimited,
  prefix_lines,
+ replace_many,
  snake_case,
  strip_prefix,
  strip_suffix,