omdev 0.0.0.dev213__py3-none-any.whl → 0.0.0.dev214__py3-none-any.whl

omdev/ci/cli.py CHANGED
@@ -1,6 +1,5 @@
  # @omlish-amalg ../scripts/ci.py
  # ruff: noqa: UP006 UP007
- # @omlish-lite
  """
  Inputs:
  - requirements.txt
@@ -9,9 +8,11 @@ Inputs:

  ==

- ./python -m ci run --cache-dir ci/cache ci/project omlish-ci
+ ./python -m omdev.ci run --cache-dir omdev/ci/tests/cache omdev/ci/tests/project omlish-ci
  """
+ import argparse
  import asyncio
+ import itertools
  import os.path
  import sys
  import typing as ta
@@ -20,15 +21,15 @@ from omlish.argparse.cli import ArgparseCli
  from omlish.argparse.cli import argparse_arg
  from omlish.argparse.cli import argparse_cmd
  from omlish.lite.check import check
+ from omlish.lite.logs import log
  from omlish.logs.standard import configure_standard_logging

  from .cache import DirectoryFileCache
- from .cache import DirectoryShellCache
  from .cache import FileCache
- from .cache import ShellCache
  from .ci import Ci
  from .compose import get_compose_service_dependencies
- from .github.cache import GithubShellCache
+ from .github.bootstrap import is_in_github_actions
+ from .github.cache import GithubFileCache
  from .github.cli import GithubCli
  from .requirements import build_requirements_hash
  from .shell import ShellCmd
@@ -65,8 +66,8 @@ class CiCli(ArgparseCli):
  @argparse_cmd(
  accepts_unknown=True,
  )
- def github(self) -> ta.Optional[int]:
- return GithubCli(self.unknown_args).cli_run()
+ async def github(self) -> ta.Optional[int]:
+ return await GithubCli(self.unknown_args).async_cli_run()

  #

@@ -77,13 +78,20 @@ class CiCli(ArgparseCli):
  argparse_arg('--compose-file'),
  argparse_arg('-r', '--requirements-txt', action='append'),

- argparse_arg('--github-cache', action='store_true'),
  argparse_arg('--cache-dir'),

+ argparse_arg('--github', action='store_true'),
+ argparse_arg('--github-detect', action='store_true'),
+
  argparse_arg('--always-pull', action='store_true'),
  argparse_arg('--always-build', action='store_true'),

  argparse_arg('--no-dependencies', action='store_true'),
+
+ argparse_arg('-e', '--env', action='append'),
+ argparse_arg('-v', '--volume', action='append'),
+
+ argparse_arg('cmd', nargs=argparse.REMAINDER),
  )
  async def run(self) -> None:
  project_dir = self.args.project_dir
@@ -94,6 +102,11 @@ class CiCli(ArgparseCli):

  #

+ cmd = ' '.join(self.args.cmd)
+ check.non_empty_str(cmd)
+
+ #
+
  check.state(os.path.isdir(project_dir))

  #
@@ -102,6 +115,7 @@ class CiCli(ArgparseCli):
  for alt in alts:
  alt_file = os.path.abspath(os.path.join(project_dir, alt))
  if os.path.isfile(alt_file):
+ log.debug('Using %s', alt_file)
  return alt_file
  return None

@@ -135,6 +149,7 @@ class CiCli(ArgparseCli):
  'requirements-ci.txt',
  ]:
  if os.path.exists(os.path.join(project_dir, rf)):
+ log.debug('Using %s', rf)
  requirements_txts.append(rf)
  else:
  for rf in requirements_txts:
@@ -142,21 +157,34 @@ class CiCli(ArgparseCli):

  #

- shell_cache: ta.Optional[ShellCache] = None
+ github = self.args.github
+ if not github and self.args.github_detect:
+ github = is_in_github_actions()
+ if github:
+ log.debug('Github detected')
+
+ #
+
  file_cache: ta.Optional[FileCache] = None
  if cache_dir is not None:
- if not os.path.exists(cache_dir):
- os.makedirs(cache_dir)
- check.state(os.path.isdir(cache_dir))
-
- directory_file_cache = DirectoryFileCache(cache_dir)
+ cache_dir = os.path.abspath(cache_dir)
+ log.debug('Using cache dir %s', cache_dir)
+ if github:
+ file_cache = GithubFileCache(cache_dir)
+ else:
+ file_cache = DirectoryFileCache(cache_dir)

- file_cache = directory_file_cache
+ #

- if self.args.github_cache:
- shell_cache = GithubShellCache(cache_dir)
- else:
- shell_cache = DirectoryShellCache(directory_file_cache)
+ run_options: ta.List[str] = []
+ for run_arg, run_arg_vals in [
+ ('-e', self.args.env or []),
+ ('-v', self.args.volume or []),
+ ]:
+ run_options.extend(itertools.chain.from_iterable(
+ [run_arg, run_arg_val]
+ for run_arg_val in run_arg_vals
+ ))

  #

@@ -171,18 +199,16 @@ class CiCli(ArgparseCli):

  requirements_txts=requirements_txts,

- cmd=ShellCmd(' && '.join([
- 'cd /project',
- 'python3 -m pytest -svv test.py',
- ])),
+ cmd=ShellCmd(cmd),

  always_pull=self.args.always_pull,
  always_build=self.args.always_build,

  no_dependencies=self.args.no_dependencies,
+
+ run_options=run_options,
  ),
  file_cache=file_cache,
- shell_cache=shell_cache,
  ) as ci:
  await ci.run()
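Note: the `run` subcommand now takes the in-container command as trailing arguments (joined and wrapped in a ShellCmd), can detect GitHub Actions via --github / --github-detect, and forwards repeated -e/--env and -v/--volume values as Docker run options. A minimal standalone sketch of the option-flattening loop in run() above, with illustrative values:

    import itertools

    env_vals = ['FOO=1', 'BAR=2']         # collected from repeated -e/--env
    volume_vals = ['/host/data:/data']    # collected from repeated -v/--volume

    run_options = []
    for flag, vals in [('-e', env_vals), ('-v', volume_vals)]:
        run_options.extend(itertools.chain.from_iterable(
            [flag, val] for val in vals
        ))

    print(run_options)  # ['-e', 'FOO=1', '-e', 'BAR=2', '-v', '/host/data:/data']

By analogy with the docstring example, an invocation might look like "./python -m omdev.ci run --cache-dir omdev/ci/tests/cache --github-detect -e FOO=1 omdev/ci/tests/project omlish-ci python3 -m pytest -svv test.py", though the exact handling of the trailing command arguments depends on the argparse configuration.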
omdev/ci/compose.py CHANGED
@@ -1,5 +1,4 @@
  # ruff: noqa: UP006 UP007
- # @omlish-lite
  """
  TODO:
  - fix rmi - only when not referenced anymore
@@ -18,9 +17,9 @@ from omlish.lite.contextmanagers import AsyncExitStacked
  from omlish.lite.contextmanagers import adefer
  from omlish.lite.contextmanagers import defer
  from omlish.lite.json import json_dumps_pretty
+ from omlish.os.temp import make_temp_file

  from .shell import ShellCmd
- from .utils import make_temp_file
  from .utils import read_yaml_file


@@ -103,11 +102,6 @@ class DockerComposeRun(AsyncExitStacked):
  if k in out_service:
  del out_service[k]

- out_service['links'] = [
- f'{l}:{l}' if ':' not in l else l
- for l in out_service.get('links', [])
- ]
-
  #

  if not self._cfg.no_dependencies:
@@ -124,7 +118,6 @@ class DockerComposeRun(AsyncExitStacked):

  else:
  out_service['depends_on'] = []
- out_service['links'] = []

omdev/ci/consts.py ADDED
@@ -0,0 +1 @@
+ CI_CACHE_VERSION = 1
omdev/ci/docker.py CHANGED
@@ -1,5 +1,4 @@
  # ruff: noqa: UP006 UP007
- # @omlish-lite
  """
  TODO:
  - some less stupid Dockerfile hash
@@ -15,10 +14,9 @@ import typing as ta

  from omlish.asyncs.asyncio.subprocesses import asyncio_subprocesses
  from omlish.lite.check import check
- from omlish.lite.contextmanagers import defer
+ from omlish.os.temp import temp_file_context

  from .shell import ShellCmd
- from .utils import make_temp_file
  from .utils import sha256_str


@@ -91,16 +89,16 @@ async def build_docker_image(
  *,
  tag: ta.Optional[str] = None,
  cwd: ta.Optional[str] = None,
+ run_options: ta.Optional[ta.Sequence[str]] = None,
  ) -> str:
- id_file = make_temp_file()
- with defer(lambda: os.unlink(id_file)):
+ with temp_file_context() as id_file:
  await asyncio_subprocesses.check_call(
  'docker',
  'build',
  '-f', os.path.abspath(docker_file),
  '--iidfile', id_file,
- '--squash',
  *(['--tag', tag] if tag is not None else []),
+ *(run_options or []),
  '.',
  **(dict(cwd=cwd) if cwd is not None else {}),
  )
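For reference, a sketch of the `docker build` argv assembled above for a hypothetical call with tag='omlish-ci' and run_options=['--platform', 'linux/amd64'] (paths are illustrative; note the previously hard-coded --squash flag is no longer passed):

    argv = [
        'docker', 'build',
        '-f', '/abs/path/to/Dockerfile',
        '--iidfile', '/tmp/tmpXXXXXX',   # temp path supplied by temp_file_context()
        '--tag', 'omlish-ci',            # only present when tag is not None
        '--platform', 'linux/amd64',     # run_options are spliced in here
        '.',
    ]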
@@ -1,5 +1,4 @@
  # ruff: noqa: UP006 UP007
- # @omlish-lite
  """
  export FILE_SIZE=$(stat --format="%s" $FILE)

omdev/ci/github/bootstrap.py CHANGED
@@ -1,5 +1,4 @@
  # ruff: noqa: UP006 UP007
- # @omlish-lite
  """
  sudo rm -rf \
  /usr/local/.ghcup \
@@ -9,3 +8,11 @@ sudo rm -rf \
  /opt/hostedtoolcache 8.0G, 14843980 files
  /usr/local/lib/android 6.4G, 17251667 files
  """
+ from .env import register_github_env_var
+
+
+ GITHUB_ACTIONS_ENV_VAR = register_github_env_var('GITHUB_ACTIONS')
+
+
+ def is_in_github_actions() -> bool:
+ return GITHUB_ACTIONS_ENV_VAR() is not None
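The detection above relies on a register_github_env_var helper from .env whose implementation is not shown in this diff. A minimal sketch of the assumed shape (a registry of referenced env-var names plus a zero-argument accessor), consistent with how GITHUB_ENV_VARS is listed in github/cli.py below:

    import os
    import typing as ta


    class GithubEnvVar(ta.NamedTuple):
        k: str

        def __call__(self) -> ta.Optional[str]:
            # Reads the variable each time it is called.
            return os.environ.get(self.k)


    GITHUB_ENV_VARS: ta.Set[GithubEnvVar] = set()


    def register_github_env_var(k: str) -> GithubEnvVar:
        ev = GithubEnvVar(k)
        GITHUB_ENV_VARS.add(ev)
        return ev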
omdev/ci/github/cache.py CHANGED
@@ -1,324 +1,71 @@
  # ruff: noqa: UP006 UP007
- # @omlish-lite
- import abc
- import dataclasses as dc
- import os
- import shlex
+ import os.path
  import typing as ta
- import urllib.parse

  from omlish.lite.check import check
- from omlish.subprocesses import subprocesses
+ from omlish.os.files import unlinking_if_exists

  from ..cache import DirectoryFileCache
- from ..cache import ShellCache
- from ..shell import ShellCmd
- from ..utils import make_temp_file
- from .cacheapi import GithubCacheServiceV1
- from .curl import GithubServiceCurlClient
+ from ..cache import FileCache
+ from .client import GithubCacheClient
+ from .client import GithubCacheServiceV1Client


  ##


- class GithubCacheShellClient(abc.ABC):
- class Entry(abc.ABC): # noqa
- pass
-
- @abc.abstractmethod
- def run_get_entry(self, key: str) -> ta.Optional[Entry]:
- raise NotImplementedError
-
- @abc.abstractmethod
- def download_get_entry(self, entry: Entry, out_file: str) -> None:
- raise NotImplementedError
-
- @abc.abstractmethod
- def upload_cache_entry(self, key: str, in_file: str) -> None:
- raise NotImplementedError
-
-
- #
-
-
- class GithubCacheServiceV1ShellClient(GithubCacheShellClient):
- BASE_URL_ENV_KEY = 'ACTIONS_CACHE_URL'
- AUTH_TOKEN_ENV_KEY = 'ACTIONS_RUNTIME_TOKEN' # noqa
-
- KEY_SUFFIX_ENV_KEY = 'GITHUB_RUN_ID'
-
- CACHE_VERSION: ta.ClassVar[int] = 1
-
- #
-
- def __init__(
- self,
- *,
- base_url: ta.Optional[str] = None,
- auth_token: ta.Optional[str] = None,
-
- key_prefix: ta.Optional[str] = None,
- key_suffix: ta.Optional[str] = None,
- ) -> None:
- super().__init__()
-
- #
-
- if base_url is None:
- base_url = os.environ[self.BASE_URL_ENV_KEY]
- service_url = GithubCacheServiceV1.get_service_url(base_url)
-
- if auth_token is None:
- auth_token = os.environ.get(self.AUTH_TOKEN_ENV_KEY)
-
- self._curl = GithubServiceCurlClient(
- service_url,
- auth_token,
- api_version=GithubCacheServiceV1.API_VERSION,
- )
-
- #
-
- self._key_prefix = key_prefix
-
- if key_suffix is None:
- key_suffix = os.environ[self.KEY_SUFFIX_ENV_KEY]
- self._key_suffix = check.non_empty_str(key_suffix)
-
- #
-
- KEY_PART_SEPARATOR = '--'
-
- def fix_key(self, s: str) -> str:
- return self.KEY_PART_SEPARATOR.join([
- *([self._key_prefix] if self._key_prefix else []),
- s,
- self._key_suffix,
- ])
-
- #
-
- @dc.dataclass(frozen=True)
- class Entry(GithubCacheShellClient.Entry):
- artifact: GithubCacheServiceV1.ArtifactCacheEntry
-
- #
-
- def build_get_entry_curl_cmd(self, key: str) -> ShellCmd:
- fixed_key = self.fix_key(key)
-
- qp = dict(
- keys=fixed_key,
- version=str(self.CACHE_VERSION),
- )
-
- return self._curl.build_cmd(
- 'GET',
- shlex.quote('?'.join([
- 'cache',
- '&'.join([
- f'{k}={urllib.parse.quote_plus(v)}'
- for k, v in qp.items()
- ]),
- ])),
- )
-
- def run_get_entry(self, key: str) -> ta.Optional[Entry]:
- fixed_key = self.fix_key(key)
- curl_cmd = self.build_get_entry_curl_cmd(fixed_key)
-
- obj = self._curl.run_json_cmd(
- curl_cmd,
- success_status_codes=[200, 204],
- )
- if obj is None:
- return None
-
- return self.Entry(GithubCacheServiceV1.dataclass_from_json(
- GithubCacheServiceV1.ArtifactCacheEntry,
- obj,
- ))
-
- #
-
- def build_download_get_entry_cmd(self, entry: Entry, out_file: str) -> ShellCmd:
- return ShellCmd(' '.join([
- 'aria2c',
- '-x', '4',
- '-o', out_file,
- check.non_empty_str(entry.artifact.archive_location),
- ]))
-
- def download_get_entry(self, entry: GithubCacheShellClient.Entry, out_file: str) -> None:
- dl_cmd = self.build_download_get_entry_cmd(
- check.isinstance(entry, GithubCacheServiceV1ShellClient.Entry),
- out_file,
- )
- dl_cmd.run(subprocesses.check_call)
-
- #
-
- def upload_cache_entry(self, key: str, in_file: str) -> None:
- fixed_key = self.fix_key(key)
-
- check.state(os.path.isfile(in_file))
-
- file_size = os.stat(in_file).st_size
-
- #
-
- reserve_req = GithubCacheServiceV1.ReserveCacheRequest(
- key=fixed_key,
- cache_size=file_size,
- version=str(self.CACHE_VERSION),
- )
- reserve_cmd = self._curl.build_post_json_cmd(
- 'caches',
- GithubCacheServiceV1.dataclass_to_json(reserve_req),
- )
- reserve_resp_obj: ta.Any = check.not_none(self._curl.run_json_cmd(
- reserve_cmd,
- success_status_codes=[201],
- ))
- reserve_resp = GithubCacheServiceV1.dataclass_from_json( # noqa
- GithubCacheServiceV1.ReserveCacheResponse,
- reserve_resp_obj,
- )
- cache_id = check.isinstance(reserve_resp.cache_id, int)
-
- #
-
- tmp_file = make_temp_file()
-
- print(f'{file_size=}')
- num_written = 0
- chunk_size = 32 * 1024 * 1024
- for i in range((file_size // chunk_size) + (1 if file_size % chunk_size else 0)):
- ofs = i * chunk_size
- sz = min(chunk_size, file_size - ofs)
-
- patch_cmd = self._curl.build_cmd(
- 'PATCH',
- f'caches/{cache_id}',
- content_type='application/octet-stream',
- headers={
- 'Content-Range': f'bytes {ofs}-{ofs + sz - 1}/*',
- },
- )
-
- #
-
- # patch_data_cmd = dc.replace(patch_cmd, s=' | '.join([
- # f'dd if={in_file} bs={chunk_size} skip={i} count=1 status=none',
- # f'{patch_cmd.s} --data-binary -',
- # ]))
- # print(f'{patch_data_cmd.s=}')
- # patch_result = self._curl.run_cmd(patch_data_cmd, raise_=True)
-
- #
-
- with open(in_file, 'rb') as f:
- f.seek(ofs)
- buf = f.read(sz)
- with open(tmp_file, 'wb') as f:
- f.write(buf)
- num_written += len(buf)
- print(f'{num_written=}')
- patch_data_cmd = dc.replace(patch_cmd, s=f'{patch_cmd.s} --data-binary @{tmp_file}')
- print(f'{patch_data_cmd.s=}')
- patch_result = self._curl.run_cmd(patch_data_cmd, raise_=True)
-
- #
-
- check.equal(patch_result.status_code, 204)
- ofs += sz
-
- #
-
- commit_req = GithubCacheServiceV1.CommitCacheRequest(
- size=file_size,
- )
- commit_cmd = self._curl.build_post_json_cmd(
- f'caches/{cache_id}',
- GithubCacheServiceV1.dataclass_to_json(commit_req),
- )
- commit_result = self._curl.run_cmd(commit_cmd, raise_=True)
- check.equal(commit_result.status_code, 204)
-
-
- ##
-
-
- class GithubShellCache(ShellCache):
+ class GithubFileCache(FileCache):
  def __init__(
  self,
  dir: str, # noqa
  *,
- client: ta.Optional[GithubCacheShellClient] = None,
+ client: ta.Optional[GithubCacheClient] = None,
+ **kwargs: ta.Any,
  ) -> None:
- super().__init__()
+ super().__init__(**kwargs)

  self._dir = check.not_none(dir)

  if client is None:
- client = GithubCacheServiceV1ShellClient()
- self._client: GithubCacheShellClient = client
+ client = GithubCacheServiceV1Client(
+ cache_version=self._version,
+ )
+ self._client: GithubCacheClient = client

- self._local = DirectoryFileCache(self._dir)
+ self._local = DirectoryFileCache(
+ self._dir,
+ version=self._version,
+ )

- def get_file_cmd(self, key: str) -> ta.Optional[ShellCmd]:
+ async def get_file(self, key: str) -> ta.Optional[str]:
  local_file = self._local.get_cache_file_path(key)
  if os.path.exists(local_file):
- return ShellCmd(f'cat {shlex.quote(local_file)}')
+ return local_file

- if (entry := self._client.run_get_entry(key)) is None:
+ if (entry := await self._client.get_entry(key)) is None:
  return None

  tmp_file = self._local.format_incomplete_file(local_file)
- try:
- self._client.download_get_entry(entry, tmp_file)
+ with unlinking_if_exists(tmp_file):
+ await self._client.download_file(entry, tmp_file)

  os.replace(tmp_file, local_file)

- except BaseException: # noqa
- os.unlink(tmp_file)
-
- raise
+ return local_file

- return ShellCmd(f'cat {shlex.quote(local_file)}')
-
- class _PutFileCmdContext(ShellCache.PutFileCmdContext): # noqa
- def __init__(
- self,
- owner: 'GithubShellCache',
- key: str,
- tmp_file: str,
- local_file: str,
- ) -> None:
- super().__init__()
-
- self._owner = owner
- self._key = key
- self._tmp_file = tmp_file
- self._local_file = local_file
-
- @property
- def cmd(self) -> ShellCmd:
- return ShellCmd(f'cat > {shlex.quote(self._tmp_file)}')
-
- def _commit(self) -> None:
- os.replace(self._tmp_file, self._local_file)
-
- self._owner._client.upload_cache_entry(self._key, self._local_file) # noqa
-
- def _abort(self) -> None:
- os.unlink(self._tmp_file)
-
- def put_file_cmd(self, key: str) -> ShellCache.PutFileCmdContext:
- local_file = self._local.get_cache_file_path(key, make_dirs=True)
- return self._PutFileCmdContext(
+ async def put_file(
  self,
+ key: str,
+ file_path: str,
+ *,
+ steal: bool = False,
+ ) -> str:
+ cache_file_path = await self._local.put_file(
  key,
- self._local.format_incomplete_file(local_file),
- local_file,
+ file_path,
+ steal=steal,
  )
+
+ await self._client.upload_file(key, cache_file_path)
+
+ return cache_file_path
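A hypothetical usage sketch of the new async file-cache API above: GithubFileCache serves hits from its local DirectoryFileCache and otherwise falls back to the cache-service client (the key and paths below are illustrative, and the remote calls require the GitHub Actions cache environment variables):

    import asyncio

    from omdev.ci.github.cache import GithubFileCache


    async def _demo() -> None:
        cache = GithubFileCache('ci/cache')

        # Local directory first, then the GitHub Actions cache service.
        hit = await cache.get_file('requirements-abc123')
        if hit is None:
            # Store a freshly built artifact locally and upload it.
            hit = await cache.put_file('requirements-abc123', '/tmp/artifact.tar')

        print(hit)


    # asyncio.run(_demo())  # only meaningful inside a GitHub Actions run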
omdev/ci/github/cli.py CHANGED
@@ -1,5 +1,4 @@
  # ruff: noqa: UP006 UP007
- # @omlish-lite
  """
  See:
  - https://docs.github.com/en/rest/actions/cache?apiVersion=2022-11-28
@@ -11,16 +10,21 @@ from omlish.argparse.cli import argparse_arg
  from omlish.argparse.cli import argparse_cmd
  from omlish.lite.json import json_dumps_pretty

- from .cache import GithubCacheServiceV1ShellClient
+ from .client import GithubCacheServiceV1Client
+ from .env import GITHUB_ENV_VARS


  class GithubCli(ArgparseCli):
+ @argparse_cmd()
+ def list_referenced_env_vars(self) -> None:
+ print('\n'.join(sorted(ev.k for ev in GITHUB_ENV_VARS)))
+
  @argparse_cmd(
  argparse_arg('key'),
  )
- def get_cache_entry(self) -> None:
- shell_client = GithubCacheServiceV1ShellClient()
- entry = shell_client.run_get_entry(self.args.key)
+ async def get_cache_entry(self) -> None:
+ client = GithubCacheServiceV1Client()
+ entry = await client.get_entry(self.args.key)
  if entry is None:
  return
  print(json_dumps_pretty(dc.asdict(entry))) # noqa