ominfra 0.0.0.dev192__py3-none-any.whl → 0.0.0.dev194__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
@@ -36,6 +36,7 @@ import abc
  import base64
  import collections
  import collections.abc
+ import configparser
  import contextlib
  import contextvars
  import ctypes as ct
@@ -79,7 +80,7 @@ import types
  import typing as ta
  import uuid
  import warnings
- import weakref # noqa
+ import weakref


  ########################################
@@ -92,15 +93,21 @@ if sys.version_info < (3, 8):
  ########################################


- # ../../omdev/toml/parser.py
- TomlParseFloat = ta.Callable[[str], ta.Any]
- TomlKey = ta.Tuple[str, ...]
- TomlPos = int # ta.TypeAlias
-
  # utils/collections.py
  K = ta.TypeVar('K')
  V = ta.TypeVar('V')

+ # ../../omlish/configs/types.py
+ ConfigMap = ta.Mapping[str, ta.Any]
+
+ # ../../omlish/formats/ini/sections.py
+ IniSectionSettingsMap = ta.Mapping[str, ta.Mapping[str, ta.Union[str, ta.Sequence[str]]]] # ta.TypeAlias
+
+ # ../../omlish/formats/toml/parser.py
+ TomlParseFloat = ta.Callable[[str], ta.Any]
+ TomlKey = ta.Tuple[str, ...]
+ TomlPos = int # ta.TypeAlias
+
  # ../../omlish/lite/cached.py
  T = ta.TypeVar('T')
  CallableT = ta.TypeVar('CallableT', bound=ta.Callable)
@@ -125,6 +132,9 @@ SocketAddress = ta.Any
  EventCallback = ta.Callable[['Event'], None]
  ProcessOutputChannel = ta.Literal['stdout', 'stderr'] # ta.TypeAlias

+ # ../../omlish/configs/formats.py
+ ConfigDataT = ta.TypeVar('ConfigDataT', bound='ConfigData')
+
  # ../../omlish/http/parsing.py
  HttpHeaders = http.client.HTTPMessage # ta.TypeAlias

@@ -141,10 +151,6 @@ InjectorBindingOrBindings = ta.Union['InjectorBinding', 'InjectorBindings']
  # ../../omlish/sockets/handlers.py
  SocketHandlerFactory = ta.Callable[[SocketAddress, ta.BinaryIO, ta.BinaryIO], 'SocketHandler']

- # ../configs.py
- ConfigMapping = ta.Mapping[str, ta.Any]
- IniConfigSectionSettingsMap = ta.Mapping[str, ta.Mapping[str, ta.Union[str, ta.Sequence[str]]]] # ta.TypeAlias
-
  # ../../omlish/http/handlers.py
  HttpHandler = ta.Callable[['HttpHandlerRequest'], 'HttpHandlerResponse'] # ta.TypeAlias

@@ -153,1291 +159,1462 @@ CoroHttpServerFactory = ta.Callable[[SocketAddress], 'CoroHttpServer']


  ########################################
- # ../../../omdev/toml/parser.py
- # SPDX-License-Identifier: MIT
- # SPDX-FileCopyrightText: 2021 Taneli Hukkinen
- # Licensed to PSF under a Contributor Agreement.
- #
- # PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
- # --------------------------------------------
- #
- # 1. This LICENSE AGREEMENT is between the Python Software Foundation ("PSF"), and the Individual or Organization
- # ("Licensee") accessing and otherwise using this software ("Python") in source or binary form and its associated
- # documentation.
- #
- # 2. Subject to the terms and conditions of this License Agreement, PSF hereby grants Licensee a nonexclusive,
- # royalty-free, world-wide license to reproduce, analyze, test, perform and/or display publicly, prepare derivative
- # works, distribute, and otherwise use Python alone or in any derivative version, provided, however, that PSF's License
- # Agreement and PSF's notice of copyright, i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
- # 2010, 2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023 Python Software Foundation; All
- # Rights Reserved" are retained in Python alone or in any derivative version prepared by Licensee.
- #
- # 3. In the event Licensee prepares a derivative work that is based on or incorporates Python or any part thereof, and
- # wants to make the derivative work available to others as provided herein, then Licensee hereby agrees to include in
- # any such work a brief summary of the changes made to Python.
- #
- # 4. PSF is making Python available to Licensee on an "AS IS" basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES,
- # EXPRESS OR IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND DISCLAIMS ANY REPRESENTATION OR WARRANTY
- # OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT INFRINGE ANY THIRD PARTY
- # RIGHTS.
- #
- # 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL
- # DAMAGES OR LOSS AS A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, OR ANY DERIVATIVE THEREOF, EVEN IF
- # ADVISED OF THE POSSIBILITY THEREOF.
- #
- # 6. This License Agreement will automatically terminate upon a material breach of its terms and conditions.
- #
- # 7. Nothing in this License Agreement shall be deemed to create any relationship of agency, partnership, or joint
- # venture between PSF and Licensee. This License Agreement does not grant permission to use PSF trademarks or trade
- # name in a trademark sense to endorse or promote products or services of Licensee, or any third party.
- #
- # 8. By copying, installing or otherwise using Python, Licensee agrees to be bound by the terms and conditions of this
- # License Agreement.
- #
- # https://github.com/python/cpython/blob/9ce90206b7a4649600218cf0bd4826db79c9a312/Lib/tomllib/_parser.py
+ # ../exceptions.py


- ##
+ class ProcessError(Exception):
+ """Specialized exceptions used when attempting to start a process."""


- _TOML_TIME_RE_STR = r'([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9])(?:\.([0-9]{1,6})[0-9]*)?'
+ class BadCommandError(ProcessError):
+ """Indicates the command could not be parsed properly."""

- TOML_RE_NUMBER = re.compile(
- r"""
- 0
- (?:
- x[0-9A-Fa-f](?:_?[0-9A-Fa-f])* # hex
- |
- b[01](?:_?[01])* # bin
- |
- o[0-7](?:_?[0-7])* # oct
- )
- |
- [+-]?(?:0|[1-9](?:_?[0-9])*) # dec, integer part
- (?P<floatpart>
- (?:\.[0-9](?:_?[0-9])*)? # optional fractional part
- (?:[eE][+-]?[0-9](?:_?[0-9])*)? # optional exponent part
- )
- """,
- flags=re.VERBOSE,
- )
- TOML_RE_LOCALTIME = re.compile(_TOML_TIME_RE_STR)
- TOML_RE_DATETIME = re.compile(
- rf"""
- ([0-9]{{4}})-(0[1-9]|1[0-2])-(0[1-9]|[12][0-9]|3[01]) # date, e.g. 1988-10-27
- (?:
- [Tt ]
- {_TOML_TIME_RE_STR}
- (?:([Zz])|([+-])([01][0-9]|2[0-3]):([0-5][0-9]))? # optional time offset
- )?
- """,
- flags=re.VERBOSE,
- )

+ class NotExecutableError(ProcessError):
+ """
+ Indicates that the filespec cannot be executed because its path resolves to a file which is not executable, or which
+ is a directory.
+ """

- def toml_match_to_datetime(match: re.Match) -> ta.Union[datetime.datetime, datetime.date]:
- """Convert a `RE_DATETIME` match to `datetime.datetime` or `datetime.date`.

- Raises ValueError if the match does not correspond to a valid date or datetime.
- """
- (
- year_str,
- month_str,
- day_str,
- hour_str,
- minute_str,
- sec_str,
- micros_str,
- zulu_time,
- offset_sign_str,
- offset_hour_str,
- offset_minute_str,
- ) = match.groups()
- year, month, day = int(year_str), int(month_str), int(day_str)
- if hour_str is None:
- return datetime.date(year, month, day)
- hour, minute, sec = int(hour_str), int(minute_str), int(sec_str)
- micros = int(micros_str.ljust(6, '0')) if micros_str else 0
- if offset_sign_str:
- tz: ta.Optional[datetime.tzinfo] = toml_cached_tz(
- offset_hour_str, offset_minute_str, offset_sign_str,
- )
- elif zulu_time:
- tz = datetime.UTC
- else: # local date-time
- tz = None
- return datetime.datetime(year, month, day, hour, minute, sec, micros, tzinfo=tz)
+ class NotFoundError(ProcessError):
+ """Indicates that the filespec cannot be executed because it could not be found."""


- @functools.lru_cache() # noqa
- def toml_cached_tz(hour_str: str, minute_str: str, sign_str: str) -> datetime.timezone:
- sign = 1 if sign_str == '+' else -1
- return datetime.timezone(
- datetime.timedelta(
- hours=sign * int(hour_str),
- minutes=sign * int(minute_str),
- ),
- )
+ class NoPermissionError(ProcessError):
+ """
+ Indicates that the file cannot be executed because the supervisor process does not possess the appropriate UNIX
+ filesystem permission to execute the file.
+ """


- def toml_match_to_localtime(match: re.Match) -> datetime.time:
- hour_str, minute_str, sec_str, micros_str = match.groups()
- micros = int(micros_str.ljust(6, '0')) if micros_str else 0
- return datetime.time(int(hour_str), int(minute_str), int(sec_str), micros)
+ ########################################
+ # ../privileges.py


- def toml_match_to_number(match: re.Match, parse_float: TomlParseFloat) -> ta.Any:
- if match.group('floatpart'):
- return parse_float(match.group())
- return int(match.group(), 0)
+ def drop_privileges(user: ta.Union[int, str, None]) -> ta.Optional[str]:
+ """
+ Drop privileges to become the specified user, which may be a username or uid. Called for supervisord startup and
+ when spawning subprocesses. Returns None on success or a string error message if privileges could not be dropped.
+ """

+ if user is None:
+ return 'No user specified to setuid to!'

- TOML_ASCII_CTRL = frozenset(chr(i) for i in range(32)) | frozenset(chr(127))
+ # get uid for user, which can be a number or username
+ try:
+ uid = int(user)
+ except ValueError:
+ try:
+ pwrec = pwd.getpwnam(user) # type: ignore
+ except KeyError:
+ return f"Can't find username {user!r}"
+ uid = pwrec[2]
+ else:
+ try:
+ pwrec = pwd.getpwuid(uid)
+ except KeyError:
+ return f"Can't find uid {uid!r}"

- # Neither of these sets include quotation mark or backslash. They are currently handled as separate cases in the parser
- # functions.
- TOML_ILLEGAL_BASIC_STR_CHARS = TOML_ASCII_CTRL - frozenset('\t')
- TOML_ILLEGAL_MULTILINE_BASIC_STR_CHARS = TOML_ASCII_CTRL - frozenset('\t\n')
+ current_uid = os.getuid()

- TOML_ILLEGAL_LITERAL_STR_CHARS = TOML_ILLEGAL_BASIC_STR_CHARS
- TOML_ILLEGAL_MULTILINE_LITERAL_STR_CHARS = TOML_ILLEGAL_MULTILINE_BASIC_STR_CHARS
+ if current_uid == uid:
+ # do nothing and return successfully if the uid is already the current one. this allows a supervisord running as
+ # an unprivileged user "foo" to start a process where the config has "user=foo" (same user) in it.
+ return None

- TOML_ILLEGAL_COMMENT_CHARS = TOML_ILLEGAL_BASIC_STR_CHARS
+ if current_uid != 0:
+ return "Can't drop privilege as nonroot user"

- TOML_WS = frozenset(' \t')
- TOML_WS_AND_NEWLINE = TOML_WS | frozenset('\n')
- TOML_BARE_KEY_CHARS = frozenset(string.ascii_letters + string.digits + '-_')
- TOML_KEY_INITIAL_CHARS = TOML_BARE_KEY_CHARS | frozenset("\"'")
- TOML_HEXDIGIT_CHARS = frozenset(string.hexdigits)
+ gid = pwrec[3]
+ if hasattr(os, 'setgroups'):
+ user = pwrec[0]
+ groups = [grprec[2] for grprec in grp.getgrall() if user in grprec[3]]

- TOML_BASIC_STR_ESCAPE_REPLACEMENTS = types.MappingProxyType(
- {
- '\\b': '\u0008', # backspace
- '\\t': '\u0009', # tab
- '\\n': '\u000A', # linefeed
- '\\f': '\u000C', # form feed
- '\\r': '\u000D', # carriage return
- '\\"': '\u0022', # quote
- '\\\\': '\u005C', # backslash
- },
- )
+ # always put our primary gid first in this list, otherwise we can lose group info since sometimes the first
+ # group in the setgroups list gets overwritten on the subsequent setgid call (at least on freebsd 9 with
+ # python 2.7 - this will be safe though for all unix /python version combos)
+ groups.insert(0, gid)
+ try:
+ os.setgroups(groups)
+ except OSError:
+ return 'Could not set groups of effective user'

+ try:
+ os.setgid(gid)
+ except OSError:
+ return 'Could not set group id of effective user'

- class TomlDecodeError(ValueError):
- """An error raised if a document is not valid TOML."""
+ os.setuid(uid)

+ return None

- def toml_load(fp: ta.BinaryIO, /, *, parse_float: TomlParseFloat = float) -> ta.Dict[str, ta.Any]:
- """Parse TOML from a binary file object."""
- b = fp.read()
- try:
- s = b.decode()
- except AttributeError:
- raise TypeError("File must be opened in binary mode, e.g. use `open('foo.toml', 'rb')`") from None
- return toml_loads(s, parse_float=parse_float)

+ ########################################
+ # ../states.py

- def toml_loads(s: str, /, *, parse_float: TomlParseFloat = float) -> ta.Dict[str, ta.Any]: # noqa: C901
- """Parse TOML from a string."""

- # The spec allows converting "\r\n" to "\n", even in string literals. Let's do so to simplify parsing.
- try:
- src = s.replace('\r\n', '\n')
- except (AttributeError, TypeError):
- raise TypeError(f"Expected str object, not '{type(s).__qualname__}'") from None
- pos = 0
- out = TomlOutput(TomlNestedDict(), TomlFlags())
- header: TomlKey = ()
- parse_float = toml_make_safe_parse_float(parse_float)
+ ##

- # Parse one statement at a time (typically means one line in TOML source)
- while True:
- # 1. Skip line leading whitespace
- pos = toml_skip_chars(src, pos, TOML_WS)

- # 2. Parse rules. Expect one of the following:
- # - end of file
- # - end of line
- # - comment
- # - key/value pair
- # - append dict to list (and move to its namespace)
- # - create dict (and move to its namespace)
- # Skip trailing whitespace when applicable.
- try:
- char = src[pos]
- except IndexError:
- break
- if char == '\n':
- pos += 1
- continue
- if char in TOML_KEY_INITIAL_CHARS:
- pos = toml_key_value_rule(src, pos, out, header, parse_float)
- pos = toml_skip_chars(src, pos, TOML_WS)
- elif char == '[':
- try:
- second_char: ta.Optional[str] = src[pos + 1]
- except IndexError:
- second_char = None
- out.flags.finalize_pending()
- if second_char == '[':
- pos, header = toml_create_list_rule(src, pos, out)
- else:
- pos, header = toml_create_dict_rule(src, pos, out)
- pos = toml_skip_chars(src, pos, TOML_WS)
- elif char != '#':
- raise toml_suffixed_err(src, pos, 'Invalid statement')
+ class ProcessState(enum.IntEnum):
+ STOPPED = 0
+ STARTING = 10
+ RUNNING = 20
+ BACKOFF = 30
+ STOPPING = 40
+ EXITED = 100
+ FATAL = 200
+ UNKNOWN = 1000

- # 3. Skip comment
- pos = toml_skip_comment(src, pos)
+ @property
+ def stopped(self) -> bool:
+ return self in STOPPED_STATES

- # 4. Expect end of line or end of file
- try:
- char = src[pos]
- except IndexError:
- break
- if char != '\n':
- raise toml_suffixed_err(
- src, pos, 'Expected newline or end of document after a statement',
- )
- pos += 1
+ @property
+ def running(self) -> bool:
+ return self in RUNNING_STATES

- return out.data.dict
+ @property
+ def signalable(self) -> bool:
+ return self in SIGNALABLE_STATES


- class TomlFlags:
- """Flags that map to parsed keys/namespaces."""
+ # http://supervisord.org/subprocess.html
+ STATE_TRANSITIONS = {
+ ProcessState.STOPPED: (ProcessState.STARTING,),
+ ProcessState.STARTING: (ProcessState.RUNNING, ProcessState.BACKOFF, ProcessState.STOPPING),
+ ProcessState.RUNNING: (ProcessState.STOPPING, ProcessState.EXITED),
+ ProcessState.BACKOFF: (ProcessState.STARTING, ProcessState.FATAL),
+ ProcessState.STOPPING: (ProcessState.STOPPED,),
+ ProcessState.EXITED: (ProcessState.STARTING,),
+ ProcessState.FATAL: (ProcessState.STARTING,),
+ }

- # Marks an immutable namespace (inline array or inline table).
- FROZEN = 0
- # Marks a nest that has been explicitly created and can no longer be opened using the "[table]" syntax.
- EXPLICIT_NEST = 1
+ STOPPED_STATES = (
+ ProcessState.STOPPED,
+ ProcessState.EXITED,
+ ProcessState.FATAL,
+ ProcessState.UNKNOWN,
+ )

- def __init__(self) -> None:
- self._flags: ta.Dict[str, dict] = {}
- self._pending_flags: ta.Set[ta.Tuple[TomlKey, int]] = set()
+ RUNNING_STATES = (
+ ProcessState.RUNNING,
+ ProcessState.BACKOFF,
+ ProcessState.STARTING,
+ )

- def add_pending(self, key: TomlKey, flag: int) -> None:
- self._pending_flags.add((key, flag))
+ SIGNALABLE_STATES = (
+ ProcessState.RUNNING,
+ ProcessState.STARTING,
+ ProcessState.STOPPING,
+ )

- def finalize_pending(self) -> None:
- for key, flag in self._pending_flags:
- self.set(key, flag, recursive=False)
- self._pending_flags.clear()

- def unset_all(self, key: TomlKey) -> None:
- cont = self._flags
- for k in key[:-1]:
- if k not in cont:
- return
- cont = cont[k]['nested']
- cont.pop(key[-1], None)
+ ##

- def set(self, key: TomlKey, flag: int, *, recursive: bool) -> None: # noqa: A003
- cont = self._flags
- key_parent, key_stem = key[:-1], key[-1]
- for k in key_parent:
- if k not in cont:
- cont[k] = {'flags': set(), 'recursive_flags': set(), 'nested': {}}
- cont = cont[k]['nested']
- if key_stem not in cont:
- cont[key_stem] = {'flags': set(), 'recursive_flags': set(), 'nested': {}}
- cont[key_stem]['recursive_flags' if recursive else 'flags'].add(flag)

- def is_(self, key: TomlKey, flag: int) -> bool:
- if not key:
- return False # document root has no flags
- cont = self._flags
- for k in key[:-1]:
- if k not in cont:
- return False
- inner_cont = cont[k]
- if flag in inner_cont['recursive_flags']:
- return True
- cont = inner_cont['nested']
- key_stem = key[-1]
- if key_stem in cont:
- cont = cont[key_stem]
- return flag in cont['flags'] or flag in cont['recursive_flags']
- return False
+ class SupervisorState(enum.IntEnum):
+ FATAL = 2
+ RUNNING = 1
+ RESTARTING = 0
+ SHUTDOWN = -1


- class TomlNestedDict:
- def __init__(self) -> None:
- # The parsed content of the TOML document
- self.dict: ta.Dict[str, ta.Any] = {}
+ ########################################
+ # ../utils/collections.py

- def get_or_create_nest(
- self,
- key: TomlKey,
- *,
- access_lists: bool = True,
- ) -> dict:
- cont: ta.Any = self.dict
- for k in key:
- if k not in cont:
- cont[k] = {}
- cont = cont[k]
- if access_lists and isinstance(cont, list):
- cont = cont[-1]
- if not isinstance(cont, dict):
- raise KeyError('There is no nest behind this key')
- return cont

- def append_nest_to_list(self, key: TomlKey) -> None:
- cont = self.get_or_create_nest(key[:-1])
- last_key = key[-1]
- if last_key in cont:
- list_ = cont[last_key]
- if not isinstance(list_, list):
- raise KeyError('An object other than list found behind this key')
- list_.append({})
- else:
- cont[last_key] = [{}]
+ class KeyedCollectionAccessors(abc.ABC, ta.Generic[K, V]):
+ @property
+ @abc.abstractmethod
+ def _by_key(self) -> ta.Mapping[K, V]:
+ raise NotImplementedError

+ def __iter__(self) -> ta.Iterator[V]:
+ return iter(self._by_key.values())

- class TomlOutput(ta.NamedTuple):
- data: TomlNestedDict
- flags: TomlFlags
+ def __len__(self) -> int:
+ return len(self._by_key)

+ def __contains__(self, key: K) -> bool:
+ return key in self._by_key

- def toml_skip_chars(src: str, pos: TomlPos, chars: ta.Iterable[str]) -> TomlPos:
- try:
- while src[pos] in chars:
- pos += 1
- except IndexError:
- pass
- return pos
+ def __getitem__(self, key: K) -> V:
+ return self._by_key[key]

+ def get(self, key: K, default: ta.Optional[V] = None) -> ta.Optional[V]:
+ return self._by_key.get(key, default)

- def toml_skip_until(
- src: str,
- pos: TomlPos,
- expect: str,
- *,
- error_on: ta.FrozenSet[str],
- error_on_eof: bool,
- ) -> TomlPos:
- try:
- new_pos = src.index(expect, pos)
- except ValueError:
- new_pos = len(src)
- if error_on_eof:
- raise toml_suffixed_err(src, new_pos, f'Expected {expect!r}') from None
+ def items(self) -> ta.Iterator[ta.Tuple[K, V]]:
+ return iter(self._by_key.items())

- if not error_on.isdisjoint(src[pos:new_pos]):
- while src[pos] not in error_on:
- pos += 1
- raise toml_suffixed_err(src, pos, f'Found invalid character {src[pos]!r}')
- return new_pos

+ class KeyedCollection(KeyedCollectionAccessors[K, V]):
+ def __init__(self, items: ta.Iterable[V]) -> None:
+ super().__init__()

- def toml_skip_comment(src: str, pos: TomlPos) -> TomlPos:
- try:
- char: ta.Optional[str] = src[pos]
- except IndexError:
- char = None
- if char == '#':
- return toml_skip_until(
- src, pos + 1, '\n', error_on=TOML_ILLEGAL_COMMENT_CHARS, error_on_eof=False,
- )
- return pos
+ by_key: ta.Dict[K, V] = {}
+ for v in items:
+ if (k := self._key(v)) in by_key:
+ raise KeyError(f'key {k} of {v} already registered by {by_key[k]}')
+ by_key[k] = v
+ self.__by_key = by_key

+ @property
+ def _by_key(self) -> ta.Mapping[K, V]:
+ return self.__by_key

- def toml_skip_comments_and_array_ws(src: str, pos: TomlPos) -> TomlPos:
- while True:
- pos_before_skip = pos
- pos = toml_skip_chars(src, pos, TOML_WS_AND_NEWLINE)
- pos = toml_skip_comment(src, pos)
- if pos == pos_before_skip:
- return pos
+ @abc.abstractmethod
+ def _key(self, v: V) -> K:
+ raise NotImplementedError

555
- def toml_create_dict_rule(src: str, pos: TomlPos, out: TomlOutput) -> ta.Tuple[TomlPos, TomlKey]:
556
- pos += 1 # Skip "["
557
- pos = toml_skip_chars(src, pos, TOML_WS)
558
- pos, key = toml_parse_key(src, pos)
373
+ ########################################
374
+ # ../utils/diag.py
559
375
 
560
- if out.flags.is_(key, TomlFlags.EXPLICIT_NEST) or out.flags.is_(key, TomlFlags.FROZEN):
561
- raise toml_suffixed_err(src, pos, f'Cannot declare {key} twice')
562
- out.flags.set(key, TomlFlags.EXPLICIT_NEST, recursive=False)
563
- try:
564
- out.data.get_or_create_nest(key)
565
- except KeyError:
566
- raise toml_suffixed_err(src, pos, 'Cannot overwrite a value') from None
567
376
 
568
- if not src.startswith(']', pos):
569
- raise toml_suffixed_err(src, pos, "Expected ']' at the end of a table declaration")
570
- return pos + 1, key
377
+ def compact_traceback() -> ta.Tuple[
378
+ ta.Tuple[str, str, int],
379
+ ta.Type[BaseException],
380
+ BaseException,
381
+ types.TracebackType,
382
+ ]:
383
+ t, v, tb = sys.exc_info()
384
+ if not tb:
385
+ raise RuntimeError('No traceback')
571
386
 
387
+ tbinfo = []
388
+ while tb:
389
+ tbinfo.append((
390
+ tb.tb_frame.f_code.co_filename,
391
+ tb.tb_frame.f_code.co_name,
392
+ str(tb.tb_lineno),
393
+ ))
394
+ tb = tb.tb_next
572
395
 
573
- def toml_create_list_rule(src: str, pos: TomlPos, out: TomlOutput) -> ta.Tuple[TomlPos, TomlKey]:
574
- pos += 2 # Skip "[["
575
- pos = toml_skip_chars(src, pos, TOML_WS)
576
- pos, key = toml_parse_key(src, pos)
396
+ # just to be safe
397
+ del tb
577
398
 
578
- if out.flags.is_(key, TomlFlags.FROZEN):
579
- raise toml_suffixed_err(src, pos, f'Cannot mutate immutable namespace {key}')
580
- # Free the namespace now that it points to another empty list item...
581
- out.flags.unset_all(key)
582
- # ...but this key precisely is still prohibited from table declaration
583
- out.flags.set(key, TomlFlags.EXPLICIT_NEST, recursive=False)
584
- try:
585
- out.data.append_nest_to_list(key)
586
- except KeyError:
587
- raise toml_suffixed_err(src, pos, 'Cannot overwrite a value') from None
399
+ file, function, line = tbinfo[-1]
400
+ info = ' '.join(['[%s|%s|%s]' % x for x in tbinfo]) # noqa
401
+ return (file, function, line), t, v, info # type: ignore
588
402
 
589
- if not src.startswith(']]', pos):
590
- raise toml_suffixed_err(src, pos, "Expected ']]' at the end of an array declaration")
591
- return pos + 2, key
592
403
 
404
+ ########################################
405
+ # ../utils/fs.py
593
406
 
594
- def toml_key_value_rule(
595
- src: str,
596
- pos: TomlPos,
597
- out: TomlOutput,
598
- header: TomlKey,
599
- parse_float: TomlParseFloat,
600
- ) -> TomlPos:
601
- pos, key, value = toml_parse_key_value_pair(src, pos, parse_float)
602
- key_parent, key_stem = key[:-1], key[-1]
603
- abs_key_parent = header + key_parent
604
407
 
605
- relative_path_cont_keys = (header + key[:i] for i in range(1, len(key)))
606
- for cont_key in relative_path_cont_keys:
607
- # Check that dotted key syntax does not redefine an existing table
608
- if out.flags.is_(cont_key, TomlFlags.EXPLICIT_NEST):
609
- raise toml_suffixed_err(src, pos, f'Cannot redefine namespace {cont_key}')
610
- # Containers in the relative path can't be opened with the table syntax or dotted key/value syntax in following
611
- # table sections.
612
- out.flags.add_pending(cont_key, TomlFlags.EXPLICIT_NEST)
408
+ def try_unlink(path: str) -> bool:
409
+ try:
410
+ os.unlink(path)
411
+ except OSError:
412
+ return False
413
+ return True
613
414
 
614
- if out.flags.is_(abs_key_parent, TomlFlags.FROZEN):
615
- raise toml_suffixed_err(
616
- src,
617
- pos,
618
- f'Cannot mutate immutable namespace {abs_key_parent}',
619
- )
620
415
 
621
- try:
622
- nest = out.data.get_or_create_nest(abs_key_parent)
623
- except KeyError:
624
- raise toml_suffixed_err(src, pos, 'Cannot overwrite a value') from None
625
- if key_stem in nest:
626
- raise toml_suffixed_err(src, pos, 'Cannot overwrite a value')
627
- # Mark inline table and array namespaces recursively immutable
628
- if isinstance(value, (dict, list)):
629
- out.flags.set(header + key, TomlFlags.FROZEN, recursive=True)
630
- nest[key_stem] = value
631
- return pos
416
+ def mktempfile(suffix: str, prefix: str, dir: str) -> str: # noqa
417
+ fd, filename = tempfile.mkstemp(suffix, prefix, dir)
418
+ os.close(fd)
419
+ return filename
632
420
 
633
421
 
634
- def toml_parse_key_value_pair(
635
- src: str,
636
- pos: TomlPos,
637
- parse_float: TomlParseFloat,
638
- ) -> ta.Tuple[TomlPos, TomlKey, ta.Any]:
639
- pos, key = toml_parse_key(src, pos)
640
- try:
641
- char: ta.Optional[str] = src[pos]
642
- except IndexError:
643
- char = None
644
- if char != '=':
645
- raise toml_suffixed_err(src, pos, "Expected '=' after a key in a key/value pair")
646
- pos += 1
647
- pos = toml_skip_chars(src, pos, TOML_WS)
648
- pos, value = toml_parse_value(src, pos, parse_float)
649
- return pos, key, value
422
+ def get_path() -> ta.Sequence[str]:
423
+ """Return a list corresponding to $PATH, or a default."""
650
424
 
425
+ path = ['/bin', '/usr/bin', '/usr/local/bin']
426
+ if 'PATH' in os.environ:
427
+ p = os.environ['PATH']
428
+ if p:
429
+ path = p.split(os.pathsep)
430
+ return path
651
431
 
652
- def toml_parse_key(src: str, pos: TomlPos) -> ta.Tuple[TomlPos, TomlKey]:
653
- pos, key_part = toml_parse_key_part(src, pos)
654
- key: TomlKey = (key_part,)
655
- pos = toml_skip_chars(src, pos, TOML_WS)
656
- while True:
657
- try:
658
- char: ta.Optional[str] = src[pos]
659
- except IndexError:
660
- char = None
661
- if char != '.':
662
- return pos, key
663
- pos += 1
664
- pos = toml_skip_chars(src, pos, TOML_WS)
665
- pos, key_part = toml_parse_key_part(src, pos)
666
- key += (key_part,)
667
- pos = toml_skip_chars(src, pos, TOML_WS)
668
432
 
433
+ def check_existing_dir(v: str) -> str:
434
+ nv = os.path.expanduser(v)
435
+ if os.path.isdir(nv):
436
+ return nv
437
+ raise ValueError(f'{v} is not an existing directory')
669
438
 
670
- def toml_parse_key_part(src: str, pos: TomlPos) -> ta.Tuple[TomlPos, str]:
671
- try:
672
- char: ta.Optional[str] = src[pos]
673
- except IndexError:
674
- char = None
675
- if char in TOML_BARE_KEY_CHARS:
676
- start_pos = pos
677
- pos = toml_skip_chars(src, pos, TOML_BARE_KEY_CHARS)
678
- return pos, src[start_pos:pos]
679
- if char == "'":
680
- return toml_parse_literal_str(src, pos)
681
- if char == '"':
682
- return toml_parse_one_line_basic_str(src, pos)
683
- raise toml_suffixed_err(src, pos, 'Invalid initial character for a key part')
684
439
 
440
+ def check_path_with_existing_dir(v: str) -> str:
441
+ nv = os.path.expanduser(v)
442
+ dir = os.path.dirname(nv) # noqa
443
+ if not dir:
444
+ # relative pathname with no directory component
445
+ return nv
446
+ if os.path.isdir(dir):
447
+ return nv
448
+ raise ValueError(f'The directory named as part of the path {v} does not exist')
685
449
 
686
- def toml_parse_one_line_basic_str(src: str, pos: TomlPos) -> ta.Tuple[TomlPos, str]:
687
- pos += 1
688
- return toml_parse_basic_str(src, pos, multiline=False)
689
450
 
451
+ ########################################
452
+ # ../utils/ostypes.py
690
453
 
691
- def toml_parse_array(src: str, pos: TomlPos, parse_float: TomlParseFloat) -> ta.Tuple[TomlPos, list]:
692
- pos += 1
693
- array: list = []
694
454
 
695
- pos = toml_skip_comments_and_array_ws(src, pos)
696
- if src.startswith(']', pos):
697
- return pos + 1, array
698
- while True:
699
- pos, val = toml_parse_value(src, pos, parse_float)
700
- array.append(val)
701
- pos = toml_skip_comments_and_array_ws(src, pos)
455
+ Fd = ta.NewType('Fd', int)
456
+ Pid = ta.NewType('Pid', int)
457
+ Rc = ta.NewType('Rc', int)
702
458
 
703
- c = src[pos:pos + 1]
704
- if c == ']':
705
- return pos + 1, array
706
- if c != ',':
707
- raise toml_suffixed_err(src, pos, 'Unclosed array')
708
- pos += 1
459
+ Uid = ta.NewType('Uid', int)
460
+ Gid = ta.NewType('Gid', int)
709
461
 
710
- pos = toml_skip_comments_and_array_ws(src, pos)
711
- if src.startswith(']', pos):
712
- return pos + 1, array
713
462
 
463
+ ########################################
464
+ # ../utils/signals.py
714
465
 
715
- def toml_parse_inline_table(src: str, pos: TomlPos, parse_float: TomlParseFloat) -> ta.Tuple[TomlPos, dict]:
716
- pos += 1
717
- nested_dict = TomlNestedDict()
718
- flags = TomlFlags()
719
466
 
720
- pos = toml_skip_chars(src, pos, TOML_WS)
721
- if src.startswith('}', pos):
722
- return pos + 1, nested_dict.dict
723
- while True:
724
- pos, key, value = toml_parse_key_value_pair(src, pos, parse_float)
725
- key_parent, key_stem = key[:-1], key[-1]
726
- if flags.is_(key, TomlFlags.FROZEN):
727
- raise toml_suffixed_err(src, pos, f'Cannot mutate immutable namespace {key}')
728
- try:
729
- nest = nested_dict.get_or_create_nest(key_parent, access_lists=False)
730
- except KeyError:
731
- raise toml_suffixed_err(src, pos, 'Cannot overwrite a value') from None
732
- if key_stem in nest:
733
- raise toml_suffixed_err(src, pos, f'Duplicate inline table key {key_stem!r}')
734
- nest[key_stem] = value
735
- pos = toml_skip_chars(src, pos, TOML_WS)
736
- c = src[pos:pos + 1]
737
- if c == '}':
738
- return pos + 1, nested_dict.dict
739
- if c != ',':
740
- raise toml_suffixed_err(src, pos, 'Unclosed inline table')
741
- if isinstance(value, (dict, list)):
742
- flags.set(key, TomlFlags.FROZEN, recursive=True)
743
- pos += 1
744
- pos = toml_skip_chars(src, pos, TOML_WS)
467
+ ##
745
468
 
746
469
 
747
- def toml_parse_basic_str_escape(
748
- src: str,
749
- pos: TomlPos,
750
- *,
751
- multiline: bool = False,
752
- ) -> ta.Tuple[TomlPos, str]:
753
- escape_id = src[pos:pos + 2]
754
- pos += 2
755
- if multiline and escape_id in {'\\ ', '\\\t', '\\\n'}:
756
- # Skip whitespace until next non-whitespace character or end of the doc. Error if non-whitespace is found before
757
- # newline.
758
- if escape_id != '\\\n':
759
- pos = toml_skip_chars(src, pos, TOML_WS)
760
- try:
761
- char = src[pos]
762
- except IndexError:
763
- return pos, ''
764
- if char != '\n':
765
- raise toml_suffixed_err(src, pos, "Unescaped '\\' in a string")
766
- pos += 1
767
- pos = toml_skip_chars(src, pos, TOML_WS_AND_NEWLINE)
768
- return pos, ''
769
- if escape_id == '\\u':
770
- return toml_parse_hex_char(src, pos, 4)
771
- if escape_id == '\\U':
772
- return toml_parse_hex_char(src, pos, 8)
470
+ _SIGS_BY_NUM: ta.Mapping[int, signal.Signals] = {s.value: s for s in signal.Signals}
471
+ _SIGS_BY_NAME: ta.Mapping[str, signal.Signals] = {s.name: s for s in signal.Signals}
472
+
473
+
474
+ def sig_num(value: ta.Union[int, str]) -> int:
773
475
  try:
774
- return pos, TOML_BASIC_STR_ESCAPE_REPLACEMENTS[escape_id]
775
- except KeyError:
776
- raise toml_suffixed_err(src, pos, "Unescaped '\\' in a string") from None
476
+ num = int(value)
777
477
 
478
+ except (ValueError, TypeError):
479
+ name = value.strip().upper() # type: ignore
480
+ if not name.startswith('SIG'):
481
+ name = f'SIG{name}'
778
482
 
779
- def toml_parse_basic_str_escape_multiline(src: str, pos: TomlPos) -> ta.Tuple[TomlPos, str]:
780
- return toml_parse_basic_str_escape(src, pos, multiline=True)
483
+ if (sn := _SIGS_BY_NAME.get(name)) is None:
484
+ raise ValueError(f'value {value!r} is not a valid signal name') # noqa
485
+ num = sn
781
486
 
487
+ if num not in _SIGS_BY_NUM:
488
+ raise ValueError(f'value {value!r} is not a valid signal number')
782
489
 
783
- def toml_parse_hex_char(src: str, pos: TomlPos, hex_len: int) -> ta.Tuple[TomlPos, str]:
784
- hex_str = src[pos:pos + hex_len]
785
- if len(hex_str) != hex_len or not TOML_HEXDIGIT_CHARS.issuperset(hex_str):
786
- raise toml_suffixed_err(src, pos, 'Invalid hex value')
787
- pos += hex_len
788
- hex_int = int(hex_str, 16)
789
- if not toml_is_unicode_scalar_value(hex_int):
790
- raise toml_suffixed_err(src, pos, 'Escaped character is not a Unicode scalar value')
791
- return pos, chr(hex_int)
490
+ return num
792
491
 
793
492
 
794
- def toml_parse_literal_str(src: str, pos: TomlPos) -> ta.Tuple[TomlPos, str]:
795
- pos += 1 # Skip starting apostrophe
796
- start_pos = pos
797
- pos = toml_skip_until(
798
- src, pos, "'", error_on=TOML_ILLEGAL_LITERAL_STR_CHARS, error_on_eof=True,
799
- )
800
- return pos + 1, src[start_pos:pos] # Skip ending apostrophe
493
+ def sig_name(num: int) -> str:
494
+ if (sig := _SIGS_BY_NUM.get(num)) is not None:
495
+ return sig.name
496
+ return f'signal {sig}'
801
497
 
802
498
 
803
- def toml_parse_multiline_str(src: str, pos: TomlPos, *, literal: bool) -> ta.Tuple[TomlPos, str]:
804
- pos += 3
805
- if src.startswith('\n', pos):
806
- pos += 1
499
+ ##
807
500
 
808
- if literal:
809
- delim = "'"
810
- end_pos = toml_skip_until(
811
- src,
812
- pos,
813
- "'''",
814
- error_on=TOML_ILLEGAL_MULTILINE_LITERAL_STR_CHARS,
815
- error_on_eof=True,
816
- )
817
- result = src[pos:end_pos]
818
- pos = end_pos + 3
819
- else:
820
- delim = '"'
821
- pos, result = toml_parse_basic_str(src, pos, multiline=True)
822
501
 
823
- # Add at maximum two extra apostrophes/quotes if the end sequence is 4 or 5 chars long instead of just 3.
824
- if not src.startswith(delim, pos):
825
- return pos, result
826
- pos += 1
827
- if not src.startswith(delim, pos):
828
- return pos, result + delim
829
- pos += 1
830
- return pos, result + (delim * 2)
502
+ class SignalReceiver:
503
+ def __init__(self) -> None:
504
+ super().__init__()
831
505
 
506
+ self._signals_recvd: ta.List[int] = []
832
507
 
833
- def toml_parse_basic_str(src: str, pos: TomlPos, *, multiline: bool) -> ta.Tuple[TomlPos, str]:
834
- if multiline:
835
- error_on = TOML_ILLEGAL_MULTILINE_BASIC_STR_CHARS
836
- parse_escapes = toml_parse_basic_str_escape_multiline
508
+ def receive(self, sig: int, frame: ta.Any = None) -> None:
509
+ if sig not in self._signals_recvd:
510
+ self._signals_recvd.append(sig)
511
+
512
+ def install(self, *sigs: int) -> None:
513
+ for sig in sigs:
514
+ signal.signal(sig, self.receive)
515
+
516
+ def get_signal(self) -> ta.Optional[int]:
517
+ if self._signals_recvd:
518
+ sig = self._signals_recvd.pop(0)
519
+ else:
520
+ sig = None
521
+ return sig
522
+
523
+
524
+ ########################################
525
+ # ../utils/strings.py
526
+
527
+
528
+ ##
529
+
530
+
531
+ def as_bytes(s: ta.Union[str, bytes], encoding: str = 'utf8') -> bytes:
532
+ if isinstance(s, bytes):
533
+ return s
837
534
  else:
838
- error_on = TOML_ILLEGAL_BASIC_STR_CHARS
839
- parse_escapes = toml_parse_basic_str_escape
840
- result = ''
841
- start_pos = pos
842
- while True:
843
- try:
844
- char = src[pos]
845
- except IndexError:
846
- raise toml_suffixed_err(src, pos, 'Unterminated string') from None
847
- if char == '"':
848
- if not multiline:
849
- return pos + 1, result + src[start_pos:pos]
850
- if src.startswith('"""', pos):
851
- return pos + 3, result + src[start_pos:pos]
852
- pos += 1
853
- continue
854
- if char == '\\':
855
- result += src[start_pos:pos]
856
- pos, parsed_escape = parse_escapes(src, pos)
857
- result += parsed_escape
858
- start_pos = pos
859
- continue
860
- if char in error_on:
861
- raise toml_suffixed_err(src, pos, f'Illegal character {char!r}')
862
- pos += 1
535
+ return s.encode(encoding)
863
536
 
864
537
 
865
- def toml_parse_value( # noqa: C901
866
- src: str,
867
- pos: TomlPos,
868
- parse_float: TomlParseFloat,
869
- ) -> ta.Tuple[TomlPos, ta.Any]:
870
- try:
871
- char: ta.Optional[str] = src[pos]
872
- except IndexError:
873
- char = None
538
+ @ta.overload
539
+ def find_prefix_at_end(haystack: str, needle: str) -> int:
540
+ ...
874
541
 
875
- # IMPORTANT: order conditions based on speed of checking and likelihood
876
542
 
877
- # Basic strings
878
- if char == '"':
879
- if src.startswith('"""', pos):
880
- return toml_parse_multiline_str(src, pos, literal=False)
881
- return toml_parse_one_line_basic_str(src, pos)
543
+ @ta.overload
544
+ def find_prefix_at_end(haystack: bytes, needle: bytes) -> int:
545
+ ...
882
546
 
883
- # Literal strings
884
- if char == "'":
885
- if src.startswith("'''", pos):
886
- return toml_parse_multiline_str(src, pos, literal=True)
887
- return toml_parse_literal_str(src, pos)
888
547
 
889
- # Booleans
890
- if char == 't':
891
- if src.startswith('true', pos):
892
- return pos + 4, True
893
- if char == 'f':
894
- if src.startswith('false', pos):
895
- return pos + 5, False
548
+ def find_prefix_at_end(haystack, needle):
549
+ l = len(needle) - 1
550
+ while l and not haystack.endswith(needle[:l]):
551
+ l -= 1
552
+ return l
896
553
 
897
- # Arrays
898
- if char == '[':
899
- return toml_parse_array(src, pos, parse_float)
900
554
 
901
- # Inline tables
902
- if char == '{':
903
- return toml_parse_inline_table(src, pos, parse_float)
555
+ ##
556
+
557
+
558
+ ANSI_ESCAPE_BEGIN = b'\x1b['
559
+ ANSI_TERMINATORS = (b'H', b'f', b'A', b'B', b'C', b'D', b'R', b's', b'u', b'J', b'K', b'h', b'l', b'p', b'm')
560
+
561
+
562
+ def strip_escapes(s: bytes) -> bytes:
563
+ """Remove all ANSI color escapes from the given string."""
564
+
565
+ result = b''
566
+ show = 1
567
+ i = 0
568
+ l = len(s)
569
+ while i < l:
570
+ if show == 0 and s[i:i + 1] in ANSI_TERMINATORS:
571
+ show = 1
572
+ elif show:
573
+ n = s.find(ANSI_ESCAPE_BEGIN, i)
574
+ if n == -1:
575
+ return result + s[i:]
576
+ else:
577
+ result = result + s[i:n]
578
+ i = n
579
+ show = 0
580
+ i += 1
581
+ return result
582
+
583
+
584
+ ##
585
+
586
+
587
+ class SuffixMultiplier:
588
+ # d is a dictionary of suffixes to integer multipliers. If no suffixes match, default is the multiplier. Matches are
589
+ # case insensitive. Return values are in the fundamental unit.
590
+ def __init__(self, d, default=1):
591
+ super().__init__()
592
+ self._d = d
593
+ self._default = default
594
+ # all keys must be the same size
595
+ self._keysz = None
596
+ for k in d:
597
+ if self._keysz is None:
598
+ self._keysz = len(k)
599
+ elif self._keysz != len(k): # type: ignore
600
+ raise ValueError(k)
601
+
602
+ def __call__(self, v: ta.Union[str, int]) -> int:
603
+ if isinstance(v, int):
604
+ return v
605
+ v = v.lower()
606
+ for s, m in self._d.items():
607
+ if v[-self._keysz:] == s: # type: ignore
608
+ return int(v[:-self._keysz]) * m # type: ignore
609
+ return int(v) * self._default
610
+
611
+
612
+ parse_bytes_size = SuffixMultiplier({
613
+ 'kb': 1024,
614
+ 'mb': 1024 * 1024,
615
+ 'gb': 1024 * 1024 * 1024,
616
+ })
617
+
618
+
619
+ #
620
+
621
+
622
+ def parse_octal(arg: ta.Union[str, int]) -> int:
623
+ if isinstance(arg, int):
624
+ return arg
625
+ try:
626
+ return int(arg, 8)
627
+ except (TypeError, ValueError):
628
+ raise ValueError(f'{arg} can not be converted to an octal type') # noqa
629
+
630
+
631
+ ########################################
632
+ # ../../../omlish/configs/types.py
904
633
 
905
- # Dates and times
906
- datetime_match = TOML_RE_DATETIME.match(src, pos)
907
- if datetime_match:
908
- try:
909
- datetime_obj = toml_match_to_datetime(datetime_match)
910
- except ValueError as e:
911
- raise toml_suffixed_err(src, pos, 'Invalid date or datetime') from e
912
- return datetime_match.end(), datetime_obj
913
- localtime_match = TOML_RE_LOCALTIME.match(src, pos)
914
- if localtime_match:
915
- return localtime_match.end(), toml_match_to_localtime(localtime_match)
916
634
 
917
- # Integers and "normal" floats. The regex will greedily match any type starting with a decimal char, so needs to be
918
- # located after handling of dates and times.
919
- number_match = TOML_RE_NUMBER.match(src, pos)
920
- if number_match:
921
- return number_match.end(), toml_match_to_number(number_match, parse_float)
635
+ #
922
636
 
923
- # Special floats
924
- first_three = src[pos:pos + 3]
925
- if first_three in {'inf', 'nan'}:
926
- return pos + 3, parse_float(first_three)
927
- first_four = src[pos:pos + 4]
928
- if first_four in {'-inf', '+inf', '-nan', '+nan'}:
929
- return pos + 4, parse_float(first_four)
930
637
 
931
- raise toml_suffixed_err(src, pos, 'Invalid value')
638
+ ########################################
639
+ # ../../../omlish/formats/ini/sections.py
932
640
 
933
641
 
934
- def toml_suffixed_err(src: str, pos: TomlPos, msg: str) -> TomlDecodeError:
935
- """Return a `TomlDecodeError` where error message is suffixed with coordinates in source."""
642
+ ##
936
643
 
937
- def coord_repr(src: str, pos: TomlPos) -> str:
938
- if pos >= len(src):
939
- return 'end of document'
940
- line = src.count('\n', 0, pos) + 1
941
- if line == 1:
942
- column = pos + 1
943
- else:
944
- column = pos - src.rindex('\n', 0, pos)
945
- return f'line {line}, column {column}'
946
644
 
947
- return TomlDecodeError(f'{msg} (at {coord_repr(src, pos)})')
645
+ def extract_ini_sections(cp: configparser.ConfigParser) -> IniSectionSettingsMap:
646
+ config_dct: ta.Dict[str, ta.Any] = {}
647
+ for sec in cp.sections():
648
+ cd = config_dct
649
+ for k in sec.split('.'):
650
+ cd = cd.setdefault(k, {})
651
+ cd.update(cp.items(sec))
652
+ return config_dct
948
653
 
949
654
 
950
- def toml_is_unicode_scalar_value(codepoint: int) -> bool:
951
- return (0 <= codepoint <= 55295) or (57344 <= codepoint <= 1114111)
655
+ ##
952
656
 
953
657
 
954
- def toml_make_safe_parse_float(parse_float: TomlParseFloat) -> TomlParseFloat:
955
- """A decorator to make `parse_float` safe.
658
+ def render_ini_sections(
659
+ settings_by_section: IniSectionSettingsMap,
660
+ ) -> str:
661
+ out = io.StringIO()
956
662
 
957
- `parse_float` must not return dicts or lists, because these types would be mixed with parsed TOML tables and arrays,
958
- thus confusing the parser. The returned decorated callable raises `ValueError` instead of returning illegal types.
959
- """
960
- # The default `float` callable never returns illegal types. Optimize it.
961
- if parse_float is float:
962
- return float
663
+ for i, (section, settings) in enumerate(settings_by_section.items()):
664
+ if i:
665
+ out.write('\n')
963
666
 
964
- def safe_parse_float(float_str: str) -> ta.Any:
965
- float_value = parse_float(float_str)
966
- if isinstance(float_value, (dict, list)):
967
- raise ValueError('parse_float must not return dicts or lists') # noqa
968
- return float_value
667
+ out.write(f'[{section}]\n')
969
668
 
970
- return safe_parse_float
669
+ for k, v in settings.items():
670
+ if isinstance(v, str):
671
+ out.write(f'{k}={v}\n')
672
+ else:
673
+ for vv in v:
674
+ out.write(f'{k}={vv}\n')
675
+
676
+ return out.getvalue()
971
677
 
972
678
 
973
679
  ########################################
974
- # ../exceptions.py
680
+ # ../../../omlish/formats/toml/parser.py
681
+ # SPDX-License-Identifier: MIT
682
+ # SPDX-FileCopyrightText: 2021 Taneli Hukkinen
683
+ # Licensed to PSF under a Contributor Agreement.
684
+ #
685
+ # PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
686
+ # --------------------------------------------
687
+ #
688
+ # 1. This LICENSE AGREEMENT is between the Python Software Foundation ("PSF"), and the Individual or Organization
689
+ # ("Licensee") accessing and otherwise using this software ("Python") in source or binary form and its associated
690
+ # documentation.
691
+ #
692
+ # 2. Subject to the terms and conditions of this License Agreement, PSF hereby grants Licensee a nonexclusive,
693
+ # royalty-free, world-wide license to reproduce, analyze, test, perform and/or display publicly, prepare derivative
694
+ # works, distribute, and otherwise use Python alone or in any derivative version, provided, however, that PSF's License
695
+ # Agreement and PSF's notice of copyright, i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
696
+ # 2010, 2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023 Python Software Foundation; All
697
+ # Rights Reserved" are retained in Python alone or in any derivative version prepared by Licensee.
698
+ #
699
+ # 3. In the event Licensee prepares a derivative work that is based on or incorporates Python or any part thereof, and
700
+ # wants to make the derivative work available to others as provided herein, then Licensee hereby agrees to include in
701
+ # any such work a brief summary of the changes made to Python.
702
+ #
703
+ # 4. PSF is making Python available to Licensee on an "AS IS" basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES,
704
+ # EXPRESS OR IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND DISCLAIMS ANY REPRESENTATION OR WARRANTY
705
+ # OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT INFRINGE ANY THIRD PARTY
706
+ # RIGHTS.
707
+ #
708
+ # 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL
709
+ # DAMAGES OR LOSS AS A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, OR ANY DERIVATIVE THEREOF, EVEN IF
710
+ # ADVISED OF THE POSSIBILITY THEREOF.
711
+ #
712
+ # 6. This License Agreement will automatically terminate upon a material breach of its terms and conditions.
713
+ #
714
+ # 7. Nothing in this License Agreement shall be deemed to create any relationship of agency, partnership, or joint
715
+ # venture between PSF and Licensee. This License Agreement does not grant permission to use PSF trademarks or trade
716
+ # name in a trademark sense to endorse or promote products or services of Licensee, or any third party.
717
+ #
718
+ # 8. By copying, installing or otherwise using Python, Licensee agrees to be bound by the terms and conditions of this
719
+ # License Agreement.
720
+ #
721
+ # https://github.com/python/cpython/blob/9ce90206b7a4649600218cf0bd4826db79c9a312/Lib/tomllib/_parser.py
975
722
 
976
723
 
977
- class ProcessError(Exception):
978
- """Specialized exceptions used when attempting to start a process."""
724
+ ##
979
725
 
980
726
 
981
- class BadCommandError(ProcessError):
982
- """Indicates the command could not be parsed properly."""
727
+ _TOML_TIME_RE_STR = r'([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9])(?:\.([0-9]{1,6})[0-9]*)?'
728
+
729
+ TOML_RE_NUMBER = re.compile(
730
+ r"""
731
+ 0
732
+ (?:
733
+ x[0-9A-Fa-f](?:_?[0-9A-Fa-f])* # hex
734
+ |
735
+ b[01](?:_?[01])* # bin
736
+ |
737
+ o[0-7](?:_?[0-7])* # oct
738
+ )
739
+ |
740
+ [+-]?(?:0|[1-9](?:_?[0-9])*) # dec, integer part
741
+ (?P<floatpart>
742
+ (?:\.[0-9](?:_?[0-9])*)? # optional fractional part
743
+ (?:[eE][+-]?[0-9](?:_?[0-9])*)? # optional exponent part
744
+ )
745
+ """,
746
+ flags=re.VERBOSE,
747
+ )
748
+ TOML_RE_LOCALTIME = re.compile(_TOML_TIME_RE_STR)
749
+ TOML_RE_DATETIME = re.compile(
750
+ rf"""
751
+ ([0-9]{{4}})-(0[1-9]|1[0-2])-(0[1-9]|[12][0-9]|3[01]) # date, e.g. 1988-10-27
752
+ (?:
753
+ [Tt ]
754
+ {_TOML_TIME_RE_STR}
755
+ (?:([Zz])|([+-])([01][0-9]|2[0-3]):([0-5][0-9]))? # optional time offset
756
+ )?
757
+ """,
758
+ flags=re.VERBOSE,
759
+ )
983
760
 
984
761
 
985
- class NotExecutableError(ProcessError):
986
- """
987
- Indicates that the filespec cannot be executed because its path resolves to a file which is not executable, or which
988
- is a directory.
762
+ def toml_match_to_datetime(match: re.Match) -> ta.Union[datetime.datetime, datetime.date]:
763
+ """Convert a `RE_DATETIME` match to `datetime.datetime` or `datetime.date`.
764
+
765
+ Raises ValueError if the match does not correspond to a valid date or datetime.
989
766
  """
767
+ (
768
+ year_str,
769
+ month_str,
770
+ day_str,
771
+ hour_str,
772
+ minute_str,
773
+ sec_str,
774
+ micros_str,
775
+ zulu_time,
776
+ offset_sign_str,
777
+ offset_hour_str,
778
+ offset_minute_str,
779
+ ) = match.groups()
780
+ year, month, day = int(year_str), int(month_str), int(day_str)
781
+ if hour_str is None:
782
+ return datetime.date(year, month, day)
783
+ hour, minute, sec = int(hour_str), int(minute_str), int(sec_str)
784
+ micros = int(micros_str.ljust(6, '0')) if micros_str else 0
785
+ if offset_sign_str:
786
+ tz: ta.Optional[datetime.tzinfo] = toml_cached_tz(
787
+ offset_hour_str, offset_minute_str, offset_sign_str,
788
+ )
789
+ elif zulu_time:
790
+ tz = datetime.UTC
791
+ else: # local date-time
792
+ tz = None
793
+ return datetime.datetime(year, month, day, hour, minute, sec, micros, tzinfo=tz)
990
794
 
991
795
 
992
- class NotFoundError(ProcessError):
993
- """Indicates that the filespec cannot be executed because it could not be found."""
796
+ @functools.lru_cache() # noqa
797
+ def toml_cached_tz(hour_str: str, minute_str: str, sign_str: str) -> datetime.timezone:
798
+ sign = 1 if sign_str == '+' else -1
799
+ return datetime.timezone(
800
+ datetime.timedelta(
801
+ hours=sign * int(hour_str),
802
+ minutes=sign * int(minute_str),
803
+ ),
804
+ )
994
805
 
995
806
 
996
- class NoPermissionError(ProcessError):
997
- """
998
- Indicates that the file cannot be executed because the supervisor process does not possess the appropriate UNIX
999
- filesystem permission to execute the file.
1000
- """
807
+ def toml_match_to_localtime(match: re.Match) -> datetime.time:
808
+ hour_str, minute_str, sec_str, micros_str = match.groups()
809
+ micros = int(micros_str.ljust(6, '0')) if micros_str else 0
810
+ return datetime.time(int(hour_str), int(minute_str), int(sec_str), micros)
1001
811
 
1002
812
 
1003
- ########################################
1004
- # ../privileges.py
813
+ def toml_match_to_number(match: re.Match, parse_float: TomlParseFloat) -> ta.Any:
814
+ if match.group('floatpart'):
815
+ return parse_float(match.group())
816
+ return int(match.group(), 0)
1005
817
 
1006
818
 
1007
- def drop_privileges(user: ta.Union[int, str, None]) -> ta.Optional[str]:
1008
- """
1009
- Drop privileges to become the specified user, which may be a username or uid. Called for supervisord startup and
1010
- when spawning subprocesses. Returns None on success or a string error message if privileges could not be dropped.
1011
- """
819
+ TOML_ASCII_CTRL = frozenset(chr(i) for i in range(32)) | frozenset(chr(127))
1012
820
 
1013
- if user is None:
1014
- return 'No user specified to setuid to!'
821
+ # Neither of these sets include quotation mark or backslash. They are currently handled as separate cases in the parser
822
+ # functions.
823
+ TOML_ILLEGAL_BASIC_STR_CHARS = TOML_ASCII_CTRL - frozenset('\t')
824
+ TOML_ILLEGAL_MULTILINE_BASIC_STR_CHARS = TOML_ASCII_CTRL - frozenset('\t\n')
1015
825
 
1016
- # get uid for user, which can be a number or username
1017
- try:
1018
- uid = int(user)
1019
- except ValueError:
1020
- try:
1021
- pwrec = pwd.getpwnam(user) # type: ignore
1022
- except KeyError:
1023
- return f"Can't find username {user!r}"
1024
- uid = pwrec[2]
1025
- else:
1026
- try:
1027
- pwrec = pwd.getpwuid(uid)
1028
- except KeyError:
1029
- return f"Can't find uid {uid!r}"
826
+ TOML_ILLEGAL_LITERAL_STR_CHARS = TOML_ILLEGAL_BASIC_STR_CHARS
827
+ TOML_ILLEGAL_MULTILINE_LITERAL_STR_CHARS = TOML_ILLEGAL_MULTILINE_BASIC_STR_CHARS
1030
828
 
1031
- current_uid = os.getuid()
829
+ TOML_ILLEGAL_COMMENT_CHARS = TOML_ILLEGAL_BASIC_STR_CHARS
1032
830
 
1033
- if current_uid == uid:
1034
- # do nothing and return successfully if the uid is already the current one. this allows a supervisord running as
1035
- # an unprivileged user "foo" to start a process where the config has "user=foo" (same user) in it.
1036
- return None
831
+ TOML_WS = frozenset(' \t')
832
+ TOML_WS_AND_NEWLINE = TOML_WS | frozenset('\n')
833
+ TOML_BARE_KEY_CHARS = frozenset(string.ascii_letters + string.digits + '-_')
834
+ TOML_KEY_INITIAL_CHARS = TOML_BARE_KEY_CHARS | frozenset("\"'")
835
+ TOML_HEXDIGIT_CHARS = frozenset(string.hexdigits)
1037
836
 
1038
- if current_uid != 0:
1039
- return "Can't drop privilege as nonroot user"
837
+ TOML_BASIC_STR_ESCAPE_REPLACEMENTS = types.MappingProxyType(
838
+ {
839
+ '\\b': '\u0008', # backspace
840
+ '\\t': '\u0009', # tab
841
+ '\\n': '\u000A', # linefeed
842
+ '\\f': '\u000C', # form feed
843
+ '\\r': '\u000D', # carriage return
844
+ '\\"': '\u0022', # quote
845
+ '\\\\': '\u005C', # backslash
846
+ },
847
+ )
1040
848
 
1041
- gid = pwrec[3]
1042
- if hasattr(os, 'setgroups'):
1043
- user = pwrec[0]
1044
- groups = [grprec[2] for grprec in grp.getgrall() if user in grprec[3]]
1045
849
 
1046
- # always put our primary gid first in this list, otherwise we can lose group info since sometimes the first
1047
- # group in the setgroups list gets overwritten on the subsequent setgid call (at least on freebsd 9 with
1048
- # python 2.7 - this will be safe though for all unix /python version combos)
1049
- groups.insert(0, gid)
1050
- try:
1051
- os.setgroups(groups)
1052
- except OSError:
1053
- return 'Could not set groups of effective user'
850
+ class TomlDecodeError(ValueError):
851
+ """An error raised if a document is not valid TOML."""
1054
852
 
853
+
854
+ def toml_load(fp: ta.BinaryIO, /, *, parse_float: TomlParseFloat = float) -> ta.Dict[str, ta.Any]:
855
+ """Parse TOML from a binary file object."""
856
+ b = fp.read()
1055
857
  try:
1056
- os.setgid(gid)
1057
- except OSError:
1058
- return 'Could not set group id of effective user'
858
+ s = b.decode()
859
+ except AttributeError:
860
+ raise TypeError("File must be opened in binary mode, e.g. use `open('foo.toml', 'rb')`") from None
861
+ return toml_loads(s, parse_float=parse_float)
1059
862
 
1060
- os.setuid(uid)
1061
863
 
1062
- return None
864
+ def toml_loads(s: str, /, *, parse_float: TomlParseFloat = float) -> ta.Dict[str, ta.Any]: # noqa: C901
865
+ """Parse TOML from a string."""
1063
866
 
867
+ # The spec allows converting "\r\n" to "\n", even in string literals. Let's do so to simplify parsing.
868
+ try:
869
+ src = s.replace('\r\n', '\n')
870
+ except (AttributeError, TypeError):
871
+ raise TypeError(f"Expected str object, not '{type(s).__qualname__}'") from None
872
+ pos = 0
873
+ out = TomlOutput(TomlNestedDict(), TomlFlags())
874
+ header: TomlKey = ()
875
+ parse_float = toml_make_safe_parse_float(parse_float)
1064
876
 
1065
- ########################################
1066
- # ../states.py
877
+ # Parse one statement at a time (typically means one line in TOML source)
878
+ while True:
879
+ # 1. Skip line leading whitespace
880
+ pos = toml_skip_chars(src, pos, TOML_WS)
1067
881
 
882
+ # 2. Parse rules. Expect one of the following:
883
+ # - end of file
884
+ # - end of line
885
+ # - comment
886
+ # - key/value pair
887
+ # - append dict to list (and move to its namespace)
888
+ # - create dict (and move to its namespace)
889
+ # Skip trailing whitespace when applicable.
890
+ try:
891
+ char = src[pos]
892
+ except IndexError:
893
+ break
894
+ if char == '\n':
895
+ pos += 1
896
+ continue
897
+ if char in TOML_KEY_INITIAL_CHARS:
898
+ pos = toml_key_value_rule(src, pos, out, header, parse_float)
899
+ pos = toml_skip_chars(src, pos, TOML_WS)
900
+ elif char == '[':
901
+ try:
902
+ second_char: ta.Optional[str] = src[pos + 1]
903
+ except IndexError:
904
+ second_char = None
905
+ out.flags.finalize_pending()
906
+ if second_char == '[':
907
+ pos, header = toml_create_list_rule(src, pos, out)
908
+ else:
909
+ pos, header = toml_create_dict_rule(src, pos, out)
910
+ pos = toml_skip_chars(src, pos, TOML_WS)
911
+ elif char != '#':
912
+ raise toml_suffixed_err(src, pos, 'Invalid statement')
1068
913
 
1069
- ##
914
+ # 3. Skip comment
915
+ pos = toml_skip_comment(src, pos)
1070
916
 
917
+ # 4. Expect end of line or end of file
918
+ try:
919
+ char = src[pos]
920
+ except IndexError:
921
+ break
922
+ if char != '\n':
923
+ raise toml_suffixed_err(
924
+ src, pos, 'Expected newline or end of document after a statement',
925
+ )
926
+ pos += 1
1071
927
 
1072
- class ProcessState(enum.IntEnum):
1073
- STOPPED = 0
1074
- STARTING = 10
1075
- RUNNING = 20
1076
- BACKOFF = 30
1077
- STOPPING = 40
1078
- EXITED = 100
1079
- FATAL = 200
1080
- UNKNOWN = 1000
928
+ return out.data.dict
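A minimal usage sketch for the statement loop above (hypothetical document, assuming toml_loads is importable from the amalgamated module): key/value pairs land in the current header's namespace, and table headers switch that namespace.

    doc = 'title = "demo"\n\n[server]\nport = 8000\nhosts = ["a", "b"]\n'
    assert toml_loads(doc) == {'title': 'demo', 'server': {'port': 8000, 'hosts': ['a', 'b']}}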
1081
929
 
1082
- @property
1083
- def stopped(self) -> bool:
1084
- return self in STOPPED_STATES
1085
930
 
1086
- @property
1087
- def running(self) -> bool:
1088
- return self in RUNNING_STATES
931
+ class TomlFlags:
932
+ """Flags that map to parsed keys/namespaces."""
1089
933
 
1090
- @property
1091
- def signalable(self) -> bool:
1092
- return self in SIGNALABLE_STATES
934
+ # Marks an immutable namespace (inline array or inline table).
935
+ FROZEN = 0
936
+ # Marks a nest that has been explicitly created and can no longer be opened using the "[table]" syntax.
937
+ EXPLICIT_NEST = 1
1093
938
 
939
+ def __init__(self) -> None:
940
+ self._flags: ta.Dict[str, dict] = {}
941
+ self._pending_flags: ta.Set[ta.Tuple[TomlKey, int]] = set()
1094
942
 
1095
- # http://supervisord.org/subprocess.html
1096
- STATE_TRANSITIONS = {
1097
- ProcessState.STOPPED: (ProcessState.STARTING,),
1098
- ProcessState.STARTING: (ProcessState.RUNNING, ProcessState.BACKOFF, ProcessState.STOPPING),
1099
- ProcessState.RUNNING: (ProcessState.STOPPING, ProcessState.EXITED),
1100
- ProcessState.BACKOFF: (ProcessState.STARTING, ProcessState.FATAL),
1101
- ProcessState.STOPPING: (ProcessState.STOPPED,),
1102
- ProcessState.EXITED: (ProcessState.STARTING,),
1103
- ProcessState.FATAL: (ProcessState.STARTING,),
1104
- }
943
+ def add_pending(self, key: TomlKey, flag: int) -> None:
944
+ self._pending_flags.add((key, flag))
1105
945
 
1106
- STOPPED_STATES = (
1107
- ProcessState.STOPPED,
1108
- ProcessState.EXITED,
1109
- ProcessState.FATAL,
1110
- ProcessState.UNKNOWN,
1111
- )
946
+ def finalize_pending(self) -> None:
947
+ for key, flag in self._pending_flags:
948
+ self.set(key, flag, recursive=False)
949
+ self._pending_flags.clear()
1112
950
 
1113
- RUNNING_STATES = (
1114
- ProcessState.RUNNING,
1115
- ProcessState.BACKOFF,
1116
- ProcessState.STARTING,
1117
- )
951
+ def unset_all(self, key: TomlKey) -> None:
952
+ cont = self._flags
953
+ for k in key[:-1]:
954
+ if k not in cont:
955
+ return
956
+ cont = cont[k]['nested']
957
+ cont.pop(key[-1], None)
1118
958
 
1119
- SIGNALABLE_STATES = (
1120
- ProcessState.RUNNING,
1121
- ProcessState.STARTING,
1122
- ProcessState.STOPPING,
1123
- )
959
+ def set(self, key: TomlKey, flag: int, *, recursive: bool) -> None: # noqa: A003
960
+ cont = self._flags
961
+ key_parent, key_stem = key[:-1], key[-1]
962
+ for k in key_parent:
963
+ if k not in cont:
964
+ cont[k] = {'flags': set(), 'recursive_flags': set(), 'nested': {}}
965
+ cont = cont[k]['nested']
966
+ if key_stem not in cont:
967
+ cont[key_stem] = {'flags': set(), 'recursive_flags': set(), 'nested': {}}
968
+ cont[key_stem]['recursive_flags' if recursive else 'flags'].add(flag)
1124
969
 
970
+ def is_(self, key: TomlKey, flag: int) -> bool:
971
+ if not key:
972
+ return False # document root has no flags
973
+ cont = self._flags
974
+ for k in key[:-1]:
975
+ if k not in cont:
976
+ return False
977
+ inner_cont = cont[k]
978
+ if flag in inner_cont['recursive_flags']:
979
+ return True
980
+ cont = inner_cont['nested']
981
+ key_stem = key[-1]
982
+ if key_stem in cont:
983
+ cont = cont[key_stem]
984
+ return flag in cont['flags'] or flag in cont['recursive_flags']
985
+ return False
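A short sketch of the flag semantics above: a flag set with recursive=False applies to the key itself but is not inherited by child keys.

    flags = TomlFlags()
    flags.set(('tbl',), TomlFlags.EXPLICIT_NEST, recursive=False)
    assert flags.is_(('tbl',), TomlFlags.EXPLICIT_NEST)
    assert not flags.is_(('tbl', 'child'), TomlFlags.EXPLICIT_NEST)  # not recursive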
1125
986
 
1126
- ##
1127
987
 
988
+ class TomlNestedDict:
989
+ def __init__(self) -> None:
990
+ # The parsed content of the TOML document
991
+ self.dict: ta.Dict[str, ta.Any] = {}
1128
992
 
1129
- class SupervisorState(enum.IntEnum):
1130
- FATAL = 2
1131
- RUNNING = 1
1132
- RESTARTING = 0
1133
- SHUTDOWN = -1
993
+ def get_or_create_nest(
994
+ self,
995
+ key: TomlKey,
996
+ *,
997
+ access_lists: bool = True,
998
+ ) -> dict:
999
+ cont: ta.Any = self.dict
1000
+ for k in key:
1001
+ if k not in cont:
1002
+ cont[k] = {}
1003
+ cont = cont[k]
1004
+ if access_lists and isinstance(cont, list):
1005
+ cont = cont[-1]
1006
+ if not isinstance(cont, dict):
1007
+ raise KeyError('There is no nest behind this key')
1008
+ return cont
1134
1009
 
1010
+ def append_nest_to_list(self, key: TomlKey) -> None:
1011
+ cont = self.get_or_create_nest(key[:-1])
1012
+ last_key = key[-1]
1013
+ if last_key in cont:
1014
+ list_ = cont[last_key]
1015
+ if not isinstance(list_, list):
1016
+ raise KeyError('An object other than list found behind this key')
1017
+ list_.append({})
1018
+ else:
1019
+ cont[last_key] = [{}]
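A sketch of the nesting helpers above: get_or_create_nest materializes intermediate tables on demand, while append_nest_to_list starts (or extends) an array of tables.

    nd = TomlNestedDict()
    nd.get_or_create_nest(('a', 'b'))['x'] = 1
    nd.append_nest_to_list(('a', 'arr'))
    assert nd.dict == {'a': {'b': {'x': 1}, 'arr': [{}]}}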
1135
1020
 
1136
- ########################################
1137
- # ../utils/collections.py
1138
1021
 
1022
+ class TomlOutput(ta.NamedTuple):
1023
+ data: TomlNestedDict
1024
+ flags: TomlFlags
1139
1025
 
1140
- class KeyedCollectionAccessors(abc.ABC, ta.Generic[K, V]):
1141
- @property
1142
- @abc.abstractmethod
1143
- def _by_key(self) -> ta.Mapping[K, V]:
1144
- raise NotImplementedError
1145
1026
 
1146
- def __iter__(self) -> ta.Iterator[V]:
1147
- return iter(self._by_key.values())
1027
+ def toml_skip_chars(src: str, pos: TomlPos, chars: ta.Iterable[str]) -> TomlPos:
1028
+ try:
1029
+ while src[pos] in chars:
1030
+ pos += 1
1031
+ except IndexError:
1032
+ pass
1033
+ return pos
1148
1034
 
1149
- def __len__(self) -> int:
1150
- return len(self._by_key)
1151
1035
 
1152
- def __contains__(self, key: K) -> bool:
1153
- return key in self._by_key
1036
+ def toml_skip_until(
1037
+ src: str,
1038
+ pos: TomlPos,
1039
+ expect: str,
1040
+ *,
1041
+ error_on: ta.FrozenSet[str],
1042
+ error_on_eof: bool,
1043
+ ) -> TomlPos:
1044
+ try:
1045
+ new_pos = src.index(expect, pos)
1046
+ except ValueError:
1047
+ new_pos = len(src)
1048
+ if error_on_eof:
1049
+ raise toml_suffixed_err(src, new_pos, f'Expected {expect!r}') from None
1050
+
1051
+ if not error_on.isdisjoint(src[pos:new_pos]):
1052
+ while src[pos] not in error_on:
1053
+ pos += 1
1054
+ raise toml_suffixed_err(src, pos, f'Found invalid character {src[pos]!r}')
1055
+ return new_pos
1154
1056
 
1155
- def __getitem__(self, key: K) -> V:
1156
- return self._by_key[key]
1157
1057
 
1158
- def get(self, key: K, default: ta.Optional[V] = None) -> ta.Optional[V]:
1159
- return self._by_key.get(key, default)
1058
+ def toml_skip_comment(src: str, pos: TomlPos) -> TomlPos:
1059
+ try:
1060
+ char: ta.Optional[str] = src[pos]
1061
+ except IndexError:
1062
+ char = None
1063
+ if char == '#':
1064
+ return toml_skip_until(
1065
+ src, pos + 1, '\n', error_on=TOML_ILLEGAL_COMMENT_CHARS, error_on_eof=False,
1066
+ )
1067
+ return pos
1160
1068
 
1161
- def items(self) -> ta.Iterator[ta.Tuple[K, V]]:
1162
- return iter(self._by_key.items())
1163
1069
 
1070
+ def toml_skip_comments_and_array_ws(src: str, pos: TomlPos) -> TomlPos:
1071
+ while True:
1072
+ pos_before_skip = pos
1073
+ pos = toml_skip_chars(src, pos, TOML_WS_AND_NEWLINE)
1074
+ pos = toml_skip_comment(src, pos)
1075
+ if pos == pos_before_skip:
1076
+ return pos
1164
1077
 
1165
- class KeyedCollection(KeyedCollectionAccessors[K, V]):
1166
- def __init__(self, items: ta.Iterable[V]) -> None:
1167
- super().__init__()
1168
1078
 
1169
- by_key: ta.Dict[K, V] = {}
1170
- for v in items:
1171
- if (k := self._key(v)) in by_key:
1172
- raise KeyError(f'key {k} of {v} already registered by {by_key[k]}')
1173
- by_key[k] = v
1174
- self.__by_key = by_key
1079
+ def toml_create_dict_rule(src: str, pos: TomlPos, out: TomlOutput) -> ta.Tuple[TomlPos, TomlKey]:
1080
+ pos += 1 # Skip "["
1081
+ pos = toml_skip_chars(src, pos, TOML_WS)
1082
+ pos, key = toml_parse_key(src, pos)
1175
1083
 
1176
- @property
1177
- def _by_key(self) -> ta.Mapping[K, V]:
1178
- return self.__by_key
1084
+ if out.flags.is_(key, TomlFlags.EXPLICIT_NEST) or out.flags.is_(key, TomlFlags.FROZEN):
1085
+ raise toml_suffixed_err(src, pos, f'Cannot declare {key} twice')
1086
+ out.flags.set(key, TomlFlags.EXPLICIT_NEST, recursive=False)
1087
+ try:
1088
+ out.data.get_or_create_nest(key)
1089
+ except KeyError:
1090
+ raise toml_suffixed_err(src, pos, 'Cannot overwrite a value') from None
1179
1091
 
1180
- @abc.abstractmethod
1181
- def _key(self, v: V) -> K:
1182
- raise NotImplementedError
1092
+ if not src.startswith(']', pos):
1093
+ raise toml_suffixed_err(src, pos, "Expected ']' at the end of a table declaration")
1094
+ return pos + 1, key
1183
1095
 
1184
1096
 
1185
- ########################################
1186
- # ../utils/diag.py
1097
+ def toml_create_list_rule(src: str, pos: TomlPos, out: TomlOutput) -> ta.Tuple[TomlPos, TomlKey]:
1098
+ pos += 2 # Skip "[["
1099
+ pos = toml_skip_chars(src, pos, TOML_WS)
1100
+ pos, key = toml_parse_key(src, pos)
1187
1101
 
1102
+ if out.flags.is_(key, TomlFlags.FROZEN):
1103
+ raise toml_suffixed_err(src, pos, f'Cannot mutate immutable namespace {key}')
1104
+ # Free the namespace now that it points to another empty list item...
1105
+ out.flags.unset_all(key)
1106
+ # ...but this key precisely is still prohibited from table declaration
1107
+ out.flags.set(key, TomlFlags.EXPLICIT_NEST, recursive=False)
1108
+ try:
1109
+ out.data.append_nest_to_list(key)
1110
+ except KeyError:
1111
+ raise toml_suffixed_err(src, pos, 'Cannot overwrite a value') from None
1188
1112
 
1189
- def compact_traceback() -> ta.Tuple[
1190
- ta.Tuple[str, str, int],
1191
- ta.Type[BaseException],
1192
- BaseException,
1193
- types.TracebackType,
1194
- ]:
1195
- t, v, tb = sys.exc_info()
1196
- if not tb:
1197
- raise RuntimeError('No traceback')
1113
+ if not src.startswith(']]', pos):
1114
+ raise toml_suffixed_err(src, pos, "Expected ']]' at the end of an array declaration")
1115
+ return pos + 2, key
1198
1116
 
1199
- tbinfo = []
1200
- while tb:
1201
- tbinfo.append((
1202
- tb.tb_frame.f_code.co_filename,
1203
- tb.tb_frame.f_code.co_name,
1204
- str(tb.tb_lineno),
1205
- ))
1206
- tb = tb.tb_next
1207
1117
 
1208
- # just to be safe
1209
- del tb
1118
+ def toml_key_value_rule(
1119
+ src: str,
1120
+ pos: TomlPos,
1121
+ out: TomlOutput,
1122
+ header: TomlKey,
1123
+ parse_float: TomlParseFloat,
1124
+ ) -> TomlPos:
1125
+ pos, key, value = toml_parse_key_value_pair(src, pos, parse_float)
1126
+ key_parent, key_stem = key[:-1], key[-1]
1127
+ abs_key_parent = header + key_parent
1210
1128
 
1211
- file, function, line = tbinfo[-1]
1212
- info = ' '.join(['[%s|%s|%s]' % x for x in tbinfo]) # noqa
1213
- return (file, function, line), t, v, info # type: ignore
1129
+ relative_path_cont_keys = (header + key[:i] for i in range(1, len(key)))
1130
+ for cont_key in relative_path_cont_keys:
1131
+ # Check that dotted key syntax does not redefine an existing table
1132
+ if out.flags.is_(cont_key, TomlFlags.EXPLICIT_NEST):
1133
+ raise toml_suffixed_err(src, pos, f'Cannot redefine namespace {cont_key}')
1134
+ # Containers in the relative path can't be opened with the table syntax or dotted key/value syntax in following
1135
+ # table sections.
1136
+ out.flags.add_pending(cont_key, TomlFlags.EXPLICIT_NEST)
1214
1137
 
1138
+ if out.flags.is_(abs_key_parent, TomlFlags.FROZEN):
1139
+ raise toml_suffixed_err(
1140
+ src,
1141
+ pos,
1142
+ f'Cannot mutate immutable namespace {abs_key_parent}',
1143
+ )
1215
1144
 
1216
- ########################################
1217
- # ../utils/fs.py
1145
+ try:
1146
+ nest = out.data.get_or_create_nest(abs_key_parent)
1147
+ except KeyError:
1148
+ raise toml_suffixed_err(src, pos, 'Cannot overwrite a value') from None
1149
+ if key_stem in nest:
1150
+ raise toml_suffixed_err(src, pos, 'Cannot overwrite a value')
1151
+ # Mark inline table and array namespaces recursively immutable
1152
+ if isinstance(value, (dict, list)):
1153
+ out.flags.set(header + key, TomlFlags.FROZEN, recursive=True)
1154
+ nest[key_stem] = value
1155
+ return pos
1218
1156
 
1219
1157
 
1220
- def try_unlink(path: str) -> bool:
1158
+ def toml_parse_key_value_pair(
1159
+ src: str,
1160
+ pos: TomlPos,
1161
+ parse_float: TomlParseFloat,
1162
+ ) -> ta.Tuple[TomlPos, TomlKey, ta.Any]:
1163
+ pos, key = toml_parse_key(src, pos)
1221
1164
  try:
1222
- os.unlink(path)
1223
- except OSError:
1224
- return False
1225
- return True
1165
+ char: ta.Optional[str] = src[pos]
1166
+ except IndexError:
1167
+ char = None
1168
+ if char != '=':
1169
+ raise toml_suffixed_err(src, pos, "Expected '=' after a key in a key/value pair")
1170
+ pos += 1
1171
+ pos = toml_skip_chars(src, pos, TOML_WS)
1172
+ pos, value = toml_parse_value(src, pos, parse_float)
1173
+ return pos, key, value
1226
1174
 
1227
1175
 
1228
- def mktempfile(suffix: str, prefix: str, dir: str) -> str: # noqa
1229
- fd, filename = tempfile.mkstemp(suffix, prefix, dir)
1230
- os.close(fd)
1231
- return filename
1176
+ def toml_parse_key(src: str, pos: TomlPos) -> ta.Tuple[TomlPos, TomlKey]:
1177
+ pos, key_part = toml_parse_key_part(src, pos)
1178
+ key: TomlKey = (key_part,)
1179
+ pos = toml_skip_chars(src, pos, TOML_WS)
1180
+ while True:
1181
+ try:
1182
+ char: ta.Optional[str] = src[pos]
1183
+ except IndexError:
1184
+ char = None
1185
+ if char != '.':
1186
+ return pos, key
1187
+ pos += 1
1188
+ pos = toml_skip_chars(src, pos, TOML_WS)
1189
+ pos, key_part = toml_parse_key_part(src, pos)
1190
+ key += (key_part,)
1191
+ pos = toml_skip_chars(src, pos, TOML_WS)
1232
1192
 
1233
1193
 
1234
- def get_path() -> ta.Sequence[str]:
1235
- """Return a list corresponding to $PATH, or a default."""
1194
+ def toml_parse_key_part(src: str, pos: TomlPos) -> ta.Tuple[TomlPos, str]:
1195
+ try:
1196
+ char: ta.Optional[str] = src[pos]
1197
+ except IndexError:
1198
+ char = None
1199
+ if char in TOML_BARE_KEY_CHARS:
1200
+ start_pos = pos
1201
+ pos = toml_skip_chars(src, pos, TOML_BARE_KEY_CHARS)
1202
+ return pos, src[start_pos:pos]
1203
+ if char == "'":
1204
+ return toml_parse_literal_str(src, pos)
1205
+ if char == '"':
1206
+ return toml_parse_one_line_basic_str(src, pos)
1207
+ raise toml_suffixed_err(src, pos, 'Invalid initial character for a key part')
1236
1208
 
1237
- path = ['/bin', '/usr/bin', '/usr/local/bin']
1238
- if 'PATH' in os.environ:
1239
- p = os.environ['PATH']
1240
- if p:
1241
- path = p.split(os.pathsep)
1242
- return path
1243
1209
 
1210
+ def toml_parse_one_line_basic_str(src: str, pos: TomlPos) -> ta.Tuple[TomlPos, str]:
1211
+ pos += 1
1212
+ return toml_parse_basic_str(src, pos, multiline=False)
1244
1213
 
1245
- def check_existing_dir(v: str) -> str:
1246
- nv = os.path.expanduser(v)
1247
- if os.path.isdir(nv):
1248
- return nv
1249
- raise ValueError(f'{v} is not an existing directory')
1250
1214
 
1215
+ def toml_parse_array(src: str, pos: TomlPos, parse_float: TomlParseFloat) -> ta.Tuple[TomlPos, list]:
1216
+ pos += 1
1217
+ array: list = []
1251
1218
 
1252
- def check_path_with_existing_dir(v: str) -> str:
1253
- nv = os.path.expanduser(v)
1254
- dir = os.path.dirname(nv) # noqa
1255
- if not dir:
1256
- # relative pathname with no directory component
1257
- return nv
1258
- if os.path.isdir(dir):
1259
- return nv
1260
- raise ValueError(f'The directory named as part of the path {v} does not exist')
1219
+ pos = toml_skip_comments_and_array_ws(src, pos)
1220
+ if src.startswith(']', pos):
1221
+ return pos + 1, array
1222
+ while True:
1223
+ pos, val = toml_parse_value(src, pos, parse_float)
1224
+ array.append(val)
1225
+ pos = toml_skip_comments_and_array_ws(src, pos)
1226
+
1227
+ c = src[pos:pos + 1]
1228
+ if c == ']':
1229
+ return pos + 1, array
1230
+ if c != ',':
1231
+ raise toml_suffixed_err(src, pos, 'Unclosed array')
1232
+ pos += 1
1261
1233
 
1234
+ pos = toml_skip_comments_and_array_ws(src, pos)
1235
+ if src.startswith(']', pos):
1236
+ return pos + 1, array
1262
1237
 
1263
- ########################################
1264
- # ../utils/ostypes.py
1265
1238
 
1239
+ def toml_parse_inline_table(src: str, pos: TomlPos, parse_float: TomlParseFloat) -> ta.Tuple[TomlPos, dict]:
1240
+ pos += 1
1241
+ nested_dict = TomlNestedDict()
1242
+ flags = TomlFlags()
1266
1243
 
1267
- Fd = ta.NewType('Fd', int)
1268
- Pid = ta.NewType('Pid', int)
1269
- Rc = ta.NewType('Rc', int)
1244
+ pos = toml_skip_chars(src, pos, TOML_WS)
1245
+ if src.startswith('}', pos):
1246
+ return pos + 1, nested_dict.dict
1247
+ while True:
1248
+ pos, key, value = toml_parse_key_value_pair(src, pos, parse_float)
1249
+ key_parent, key_stem = key[:-1], key[-1]
1250
+ if flags.is_(key, TomlFlags.FROZEN):
1251
+ raise toml_suffixed_err(src, pos, f'Cannot mutate immutable namespace {key}')
1252
+ try:
1253
+ nest = nested_dict.get_or_create_nest(key_parent, access_lists=False)
1254
+ except KeyError:
1255
+ raise toml_suffixed_err(src, pos, 'Cannot overwrite a value') from None
1256
+ if key_stem in nest:
1257
+ raise toml_suffixed_err(src, pos, f'Duplicate inline table key {key_stem!r}')
1258
+ nest[key_stem] = value
1259
+ pos = toml_skip_chars(src, pos, TOML_WS)
1260
+ c = src[pos:pos + 1]
1261
+ if c == '}':
1262
+ return pos + 1, nested_dict.dict
1263
+ if c != ',':
1264
+ raise toml_suffixed_err(src, pos, 'Unclosed inline table')
1265
+ if isinstance(value, (dict, list)):
1266
+ flags.set(key, TomlFlags.FROZEN, recursive=True)
1267
+ pos += 1
1268
+ pos = toml_skip_chars(src, pos, TOML_WS)
1270
1269
 
1271
- Uid = ta.NewType('Uid', int)
1272
- Gid = ta.NewType('Gid', int)
1273
1270
 
1271
+ def toml_parse_basic_str_escape(
1272
+ src: str,
1273
+ pos: TomlPos,
1274
+ *,
1275
+ multiline: bool = False,
1276
+ ) -> ta.Tuple[TomlPos, str]:
1277
+ escape_id = src[pos:pos + 2]
1278
+ pos += 2
1279
+ if multiline and escape_id in {'\\ ', '\\\t', '\\\n'}:
1280
+ # Skip whitespace until next non-whitespace character or end of the doc. Error if non-whitespace is found before
1281
+ # newline.
1282
+ if escape_id != '\\\n':
1283
+ pos = toml_skip_chars(src, pos, TOML_WS)
1284
+ try:
1285
+ char = src[pos]
1286
+ except IndexError:
1287
+ return pos, ''
1288
+ if char != '\n':
1289
+ raise toml_suffixed_err(src, pos, "Unescaped '\\' in a string")
1290
+ pos += 1
1291
+ pos = toml_skip_chars(src, pos, TOML_WS_AND_NEWLINE)
1292
+ return pos, ''
1293
+ if escape_id == '\\u':
1294
+ return toml_parse_hex_char(src, pos, 4)
1295
+ if escape_id == '\\U':
1296
+ return toml_parse_hex_char(src, pos, 8)
1297
+ try:
1298
+ return pos, TOML_BASIC_STR_ESCAPE_REPLACEMENTS[escape_id]
1299
+ except KeyError:
1300
+ raise toml_suffixed_err(src, pos, "Unescaped '\\' in a string") from None
1274
1301
 
1275
- ########################################
1276
- # ../utils/signals.py
1277
1302
 
1303
+ def toml_parse_basic_str_escape_multiline(src: str, pos: TomlPos) -> ta.Tuple[TomlPos, str]:
1304
+ return toml_parse_basic_str_escape(src, pos, multiline=True)
1278
1305
 
1279
- ##
1280
1306
 
1307
+ def toml_parse_hex_char(src: str, pos: TomlPos, hex_len: int) -> ta.Tuple[TomlPos, str]:
1308
+ hex_str = src[pos:pos + hex_len]
1309
+ if len(hex_str) != hex_len or not TOML_HEXDIGIT_CHARS.issuperset(hex_str):
1310
+ raise toml_suffixed_err(src, pos, 'Invalid hex value')
1311
+ pos += hex_len
1312
+ hex_int = int(hex_str, 16)
1313
+ if not toml_is_unicode_scalar_value(hex_int):
1314
+ raise toml_suffixed_err(src, pos, 'Escaped character is not a Unicode scalar value')
1315
+ return pos, chr(hex_int)
1281
1316
 
1282
- _SIGS_BY_NUM: ta.Mapping[int, signal.Signals] = {s.value: s for s in signal.Signals}
1283
- _SIGS_BY_NAME: ta.Mapping[str, signal.Signals] = {s.name: s for s in signal.Signals}
1284
1317
 
1318
+ def toml_parse_literal_str(src: str, pos: TomlPos) -> ta.Tuple[TomlPos, str]:
1319
+ pos += 1 # Skip starting apostrophe
1320
+ start_pos = pos
1321
+ pos = toml_skip_until(
1322
+ src, pos, "'", error_on=TOML_ILLEGAL_LITERAL_STR_CHARS, error_on_eof=True,
1323
+ )
1324
+ return pos + 1, src[start_pos:pos] # Skip ending apostrophe
1285
1325
 
1286
- def sig_num(value: ta.Union[int, str]) -> int:
1287
- try:
1288
- num = int(value)
1289
1326
 
1290
- except (ValueError, TypeError):
1291
- name = value.strip().upper() # type: ignore
1292
- if not name.startswith('SIG'):
1293
- name = f'SIG{name}'
1327
+ def toml_parse_multiline_str(src: str, pos: TomlPos, *, literal: bool) -> ta.Tuple[TomlPos, str]:
1328
+ pos += 3
1329
+ if src.startswith('\n', pos):
1330
+ pos += 1
1294
1331
 
1295
- if (sn := _SIGS_BY_NAME.get(name)) is None:
1296
- raise ValueError(f'value {value!r} is not a valid signal name') # noqa
1297
- num = sn
1332
+ if literal:
1333
+ delim = "'"
1334
+ end_pos = toml_skip_until(
1335
+ src,
1336
+ pos,
1337
+ "'''",
1338
+ error_on=TOML_ILLEGAL_MULTILINE_LITERAL_STR_CHARS,
1339
+ error_on_eof=True,
1340
+ )
1341
+ result = src[pos:end_pos]
1342
+ pos = end_pos + 3
1343
+ else:
1344
+ delim = '"'
1345
+ pos, result = toml_parse_basic_str(src, pos, multiline=True)
1298
1346
 
1299
- if num not in _SIGS_BY_NUM:
1300
- raise ValueError(f'value {value!r} is not a valid signal number')
1347
+ # Add at maximum two extra apostrophes/quotes if the end sequence is 4 or 5 chars long instead of just 3.
1348
+ if not src.startswith(delim, pos):
1349
+ return pos, result
1350
+ pos += 1
1351
+ if not src.startswith(delim, pos):
1352
+ return pos, result + delim
1353
+ pos += 1
1354
+ return pos, result + (delim * 2)
1301
1355
 
1302
- return num
1303
1356
 
1357
+ def toml_parse_basic_str(src: str, pos: TomlPos, *, multiline: bool) -> ta.Tuple[TomlPos, str]:
1358
+ if multiline:
1359
+ error_on = TOML_ILLEGAL_MULTILINE_BASIC_STR_CHARS
1360
+ parse_escapes = toml_parse_basic_str_escape_multiline
1361
+ else:
1362
+ error_on = TOML_ILLEGAL_BASIC_STR_CHARS
1363
+ parse_escapes = toml_parse_basic_str_escape
1364
+ result = ''
1365
+ start_pos = pos
1366
+ while True:
1367
+ try:
1368
+ char = src[pos]
1369
+ except IndexError:
1370
+ raise toml_suffixed_err(src, pos, 'Unterminated string') from None
1371
+ if char == '"':
1372
+ if not multiline:
1373
+ return pos + 1, result + src[start_pos:pos]
1374
+ if src.startswith('"""', pos):
1375
+ return pos + 3, result + src[start_pos:pos]
1376
+ pos += 1
1377
+ continue
1378
+ if char == '\\':
1379
+ result += src[start_pos:pos]
1380
+ pos, parsed_escape = parse_escapes(src, pos)
1381
+ result += parsed_escape
1382
+ start_pos = pos
1383
+ continue
1384
+ if char in error_on:
1385
+ raise toml_suffixed_err(src, pos, f'Illegal character {char!r}')
1386
+ pos += 1
1304
1387
 
1305
- def sig_name(num: int) -> str:
1306
- if (sig := _SIGS_BY_NUM.get(num)) is not None:
1307
- return sig.name
1308
- return f'signal {sig}'
1309
1388
 
1389
+ def toml_parse_value( # noqa: C901
1390
+ src: str,
1391
+ pos: TomlPos,
1392
+ parse_float: TomlParseFloat,
1393
+ ) -> ta.Tuple[TomlPos, ta.Any]:
1394
+ try:
1395
+ char: ta.Optional[str] = src[pos]
1396
+ except IndexError:
1397
+ char = None
1310
1398
 
1311
- ##
1399
+ # IMPORTANT: order conditions based on speed of checking and likelihood
1312
1400
 
1401
+ # Basic strings
1402
+ if char == '"':
1403
+ if src.startswith('"""', pos):
1404
+ return toml_parse_multiline_str(src, pos, literal=False)
1405
+ return toml_parse_one_line_basic_str(src, pos)
1313
1406
 
1314
- class SignalReceiver:
1315
- def __init__(self) -> None:
1316
- super().__init__()
1407
+ # Literal strings
1408
+ if char == "'":
1409
+ if src.startswith("'''", pos):
1410
+ return toml_parse_multiline_str(src, pos, literal=True)
1411
+ return toml_parse_literal_str(src, pos)
1317
1412
 
1318
- self._signals_recvd: ta.List[int] = []
1413
+ # Booleans
1414
+ if char == 't':
1415
+ if src.startswith('true', pos):
1416
+ return pos + 4, True
1417
+ if char == 'f':
1418
+ if src.startswith('false', pos):
1419
+ return pos + 5, False
1319
1420
 
1320
- def receive(self, sig: int, frame: ta.Any = None) -> None:
1321
- if sig not in self._signals_recvd:
1322
- self._signals_recvd.append(sig)
1421
+ # Arrays
1422
+ if char == '[':
1423
+ return toml_parse_array(src, pos, parse_float)
1323
1424
 
1324
- def install(self, *sigs: int) -> None:
1325
- for sig in sigs:
1326
- signal.signal(sig, self.receive)
1425
+ # Inline tables
1426
+ if char == '{':
1427
+ return toml_parse_inline_table(src, pos, parse_float)
1327
1428
 
1328
- def get_signal(self) -> ta.Optional[int]:
1329
- if self._signals_recvd:
1330
- sig = self._signals_recvd.pop(0)
1331
- else:
1332
- sig = None
1333
- return sig
1429
+ # Dates and times
1430
+ datetime_match = TOML_RE_DATETIME.match(src, pos)
1431
+ if datetime_match:
1432
+ try:
1433
+ datetime_obj = toml_match_to_datetime(datetime_match)
1434
+ except ValueError as e:
1435
+ raise toml_suffixed_err(src, pos, 'Invalid date or datetime') from e
1436
+ return datetime_match.end(), datetime_obj
1437
+ localtime_match = TOML_RE_LOCALTIME.match(src, pos)
1438
+ if localtime_match:
1439
+ return localtime_match.end(), toml_match_to_localtime(localtime_match)
1334
1440
 
1441
+ # Integers and "normal" floats. The regex will greedily match any type starting with a decimal char, so needs to be
1442
+ # located after handling of dates and times.
1443
+ number_match = TOML_RE_NUMBER.match(src, pos)
1444
+ if number_match:
1445
+ return number_match.end(), toml_match_to_number(number_match, parse_float)
1335
1446
 
1336
- ########################################
1337
- # ../utils/strings.py
1447
+ # Special floats
1448
+ first_three = src[pos:pos + 3]
1449
+ if first_three in {'inf', 'nan'}:
1450
+ return pos + 3, parse_float(first_three)
1451
+ first_four = src[pos:pos + 4]
1452
+ if first_four in {'-inf', '+inf', '-nan', '+nan'}:
1453
+ return pos + 4, parse_float(first_four)
1338
1454
 
1455
+ raise toml_suffixed_err(src, pos, 'Invalid value')
1339
1456
 
1340
- ##
1341
1457
 
1458
+ def toml_suffixed_err(src: str, pos: TomlPos, msg: str) -> TomlDecodeError:
1459
+ """Return a `TomlDecodeError` where error message is suffixed with coordinates in source."""
1342
1460
 
1343
- def as_bytes(s: ta.Union[str, bytes], encoding: str = 'utf8') -> bytes:
1344
- if isinstance(s, bytes):
1345
- return s
1346
- else:
1347
- return s.encode(encoding)
1461
+ def coord_repr(src: str, pos: TomlPos) -> str:
1462
+ if pos >= len(src):
1463
+ return 'end of document'
1464
+ line = src.count('\n', 0, pos) + 1
1465
+ if line == 1:
1466
+ column = pos + 1
1467
+ else:
1468
+ column = pos - src.rindex('\n', 0, pos)
1469
+ return f'line {line}, column {column}'
1348
1470
 
1471
+ return TomlDecodeError(f'{msg} (at {coord_repr(src, pos)})')
1349
1472
 
1350
- @ta.overload
1351
- def find_prefix_at_end(haystack: str, needle: str) -> int:
1352
- ...
1353
1473
 
1474
+ def toml_is_unicode_scalar_value(codepoint: int) -> bool:
1475
+ return (0 <= codepoint <= 55295) or (57344 <= codepoint <= 1114111)
1354
1476
 
1355
- @ta.overload
1356
- def find_prefix_at_end(haystack: bytes, needle: bytes) -> int:
1357
- ...
1358
1477
 
1478
+ def toml_make_safe_parse_float(parse_float: TomlParseFloat) -> TomlParseFloat:
1479
+ """A decorator to make `parse_float` safe.
1359
1480
 
1360
- def find_prefix_at_end(haystack, needle):
1361
- l = len(needle) - 1
1362
- while l and not haystack.endswith(needle[:l]):
1363
- l -= 1
1364
- return l
1481
+ `parse_float` must not return dicts or lists, because these types would be mixed with parsed TOML tables and arrays,
1482
+ thus confusing the parser. The returned decorated callable raises `ValueError` instead of returning illegal types.
1483
+ """
1484
+ # The default `float` callable never returns illegal types. Optimize it.
1485
+ if parse_float is float:
1486
+ return float
1365
1487
 
1488
+ def safe_parse_float(float_str: str) -> ta.Any:
1489
+ float_value = parse_float(float_str)
1490
+ if isinstance(float_value, (dict, list)):
1491
+ raise ValueError('parse_float must not return dicts or lists') # noqa
1492
+ return float_value
1366
1493
 
1367
- ##
1494
+ return safe_parse_float
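A hedged sketch of supplying a custom parse_float (decimal.Decimal keeps exact decimal values; the wrapper above only guards against callables that return dicts or lists):

    from decimal import Decimal
    assert toml_loads('pi = 3.14', parse_float=Decimal)['pi'] == Decimal('3.14')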
1368
1495
 
1369
1496
 
1370
- ANSI_ESCAPE_BEGIN = b'\x1b['
1371
- ANSI_TERMINATORS = (b'H', b'f', b'A', b'B', b'C', b'D', b'R', b's', b'u', b'J', b'K', b'h', b'l', b'p', b'm')
1497
+ ########################################
1498
+ # ../../../omlish/formats/toml/writer.py
1372
1499
 
1373
1500
 
1374
- def strip_escapes(s: bytes) -> bytes:
1375
- """Remove all ANSI color escapes from the given string."""
1501
+ class TomlWriter:
1502
+ @dc.dataclass(frozen=True)
1503
+ class Literal:
1504
+ s: str
1376
1505
 
1377
- result = b''
1378
- show = 1
1379
- i = 0
1380
- l = len(s)
1381
- while i < l:
1382
- if show == 0 and s[i:i + 1] in ANSI_TERMINATORS:
1383
- show = 1
1384
- elif show:
1385
- n = s.find(ANSI_ESCAPE_BEGIN, i)
1386
- if n == -1:
1387
- return result + s[i:]
1388
- else:
1389
- result = result + s[i:n]
1390
- i = n
1391
- show = 0
1392
- i += 1
1393
- return result
1506
+ def __init__(self, out: ta.TextIO) -> None:
1507
+ super().__init__()
1508
+ self._out = out
1394
1509
 
1510
+ self._indent = 0
1511
+ self._wrote_indent = False
1395
1512
 
1396
- ##
1513
+ #
1397
1514
 
1515
+ def _w(self, s: str) -> None:
1516
+ if not self._wrote_indent:
1517
+ self._out.write(' ' * self._indent)
1518
+ self._wrote_indent = True
1519
+ self._out.write(s)
1398
1520
 
1399
- class SuffixMultiplier:
1400
- # d is a dictionary of suffixes to integer multipliers. If no suffixes match, default is the multiplier. Matches are
1401
- # case insensitive. Return values are in the fundamental unit.
1402
- def __init__(self, d, default=1):
1403
- super().__init__()
1404
- self._d = d
1405
- self._default = default
1406
- # all keys must be the same size
1407
- self._keysz = None
1408
- for k in d:
1409
- if self._keysz is None:
1410
- self._keysz = len(k)
1411
- elif self._keysz != len(k): # type: ignore
1412
- raise ValueError(k)
1521
+ def _nl(self) -> None:
1522
+ self._out.write('\n')
1523
+ self._wrote_indent = False
1413
1524
 
1414
- def __call__(self, v: ta.Union[str, int]) -> int:
1415
- if isinstance(v, int):
1416
- return v
1417
- v = v.lower()
1418
- for s, m in self._d.items():
1419
- if v[-self._keysz:] == s: # type: ignore
1420
- return int(v[:-self._keysz]) * m # type: ignore
1421
- return int(v) * self._default
1525
+ def _needs_quote(self, s: str) -> bool:
1526
+ return (
1527
+ not s or
1528
+ any(c in s for c in '\'"\n') or
1529
+ s[0] not in string.ascii_letters
1530
+ )
1422
1531
 
1532
+ def _maybe_quote(self, s: str) -> str:
1533
+ if self._needs_quote(s):
1534
+ return repr(s)
1535
+ else:
1536
+ return s
1423
1537
 
1424
- parse_bytes_size = SuffixMultiplier({
1425
- 'kb': 1024,
1426
- 'mb': 1024 * 1024,
1427
- 'gb': 1024 * 1024 * 1024,
1428
- })
1538
+ #
1429
1539
 
1540
+ def write_root(self, obj: ta.Mapping) -> None:
1541
+ for i, (k, v) in enumerate(obj.items()):
1542
+ if i:
1543
+ self._nl()
1544
+ self._w('[')
1545
+ self._w(self._maybe_quote(k))
1546
+ self._w(']')
1547
+ self._nl()
1548
+ self.write_table_contents(v)
1549
+
1550
+ def write_table_contents(self, obj: ta.Mapping) -> None:
1551
+ for k, v in obj.items():
1552
+ self.write_key(k)
1553
+ self._w(' = ')
1554
+ self.write_value(v)
1555
+ self._nl()
1556
+
1557
+ def write_array(self, obj: ta.Sequence) -> None:
1558
+ self._w('[')
1559
+ self._nl()
1560
+ self._indent += 1
1561
+ for e in obj:
1562
+ self.write_value(e)
1563
+ self._w(',')
1564
+ self._nl()
1565
+ self._indent -= 1
1566
+ self._w(']')
1567
+
1568
+ def write_inline_table(self, obj: ta.Mapping) -> None:
1569
+ self._w('{')
1570
+ for i, (k, v) in enumerate(obj.items()):
1571
+ if i:
1572
+ self._w(', ')
1573
+ self.write_key(k)
1574
+ self._w(' = ')
1575
+ self.write_value(v)
1576
+ self._w('}')
1577
+
1578
+ def write_inline_array(self, obj: ta.Sequence) -> None:
1579
+ self._w('[')
1580
+ for i, e in enumerate(obj):
1581
+ if i:
1582
+ self._w(', ')
1583
+ self.write_value(e)
1584
+ self._w(']')
1585
+
1586
+ def write_key(self, obj: ta.Any) -> None:
1587
+ if isinstance(obj, TomlWriter.Literal):
1588
+ self._w(obj.s)
1589
+ elif isinstance(obj, str):
1590
+ self._w(self._maybe_quote(obj.replace('_', '-')))
1591
+ elif isinstance(obj, int):
1592
+ self._w(repr(str(obj)))
1593
+ else:
1594
+ raise TypeError(obj)
1430
1595
 
1431
- #
1596
+ def write_value(self, obj: ta.Any) -> None:
1597
+ if isinstance(obj, bool):
1598
+ self._w(str(obj).lower())
1599
+ elif isinstance(obj, (str, int, float)):
1600
+ self._w(repr(obj))
1601
+ elif isinstance(obj, ta.Mapping):
1602
+ self.write_inline_table(obj)
1603
+ elif isinstance(obj, ta.Sequence):
1604
+ if not obj:
1605
+ self.write_inline_array(obj)
1606
+ else:
1607
+ self.write_array(obj)
1608
+ else:
1609
+ raise TypeError(obj)
1432
1610
 
1611
+ #
1433
1612
 
1434
- def parse_octal(arg: ta.Union[str, int]) -> int:
1435
- if isinstance(arg, int):
1436
- return arg
1437
- try:
1438
- return int(arg, 8)
1439
- except (TypeError, ValueError):
1440
- raise ValueError(f'{arg} can not be converted to an octal type') # noqa
1613
+ @classmethod
1614
+ def write_str(cls, obj: ta.Any) -> str:
1615
+ out = io.StringIO()
1616
+ cls(out).write_value(obj)
1617
+ return out.getvalue()
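A small rendering sketch for the writer above (note that write_key rewrites underscores to dashes in string keys, and booleans are lowered to TOML true/false):

    buf = io.StringIO()
    TomlWriter(buf).write_root({'server': {'port': 8000, 'debug': False}})
    # buf.getvalue() is roughly:
    #   [server]
    #   port = 8000
    #   debug = false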
1441
1618
 
1442
1619
 
1443
1620
  ########################################
@@ -3069,61 +3246,338 @@ def waitpid(
3069
3246
  ##
3070
3247
 
3071
3248
 
3072
- def name_to_uid(name: str) -> Uid:
3073
- try:
3074
- uid = int(name)
3075
- except ValueError:
3076
- try:
3077
- pwdrec = pwd.getpwnam(name)
3078
- except KeyError:
3079
- raise ValueError(f'Invalid user name {name}') # noqa
3080
- uid = pwdrec[2]
3081
- else:
3082
- try:
3083
- pwd.getpwuid(uid) # check if uid is valid
3084
- except KeyError:
3085
- raise ValueError(f'Invalid user id {name}') # noqa
3086
- return Uid(uid)
3249
+ def name_to_uid(name: str) -> Uid:
3250
+ try:
3251
+ uid = int(name)
3252
+ except ValueError:
3253
+ try:
3254
+ pwdrec = pwd.getpwnam(name)
3255
+ except KeyError:
3256
+ raise ValueError(f'Invalid user name {name}') # noqa
3257
+ uid = pwdrec[2]
3258
+ else:
3259
+ try:
3260
+ pwd.getpwuid(uid) # check if uid is valid
3261
+ except KeyError:
3262
+ raise ValueError(f'Invalid user id {name}') # noqa
3263
+ return Uid(uid)
3264
+
3265
+
3266
+ def name_to_gid(name: str) -> Gid:
3267
+ try:
3268
+ gid = int(name)
3269
+ except ValueError:
3270
+ try:
3271
+ grprec = grp.getgrnam(name)
3272
+ except KeyError:
3273
+ raise ValueError(f'Invalid group name {name}') # noqa
3274
+ gid = grprec[2]
3275
+ else:
3276
+ try:
3277
+ grp.getgrgid(gid) # check if gid is valid
3278
+ except KeyError:
3279
+ raise ValueError(f'Invalid group id {name}') # noqa
3280
+ return Gid(gid)
3281
+
3282
+
3283
+ def gid_for_uid(uid: Uid) -> Gid:
3284
+ pwrec = pwd.getpwuid(uid)
3285
+ return Gid(pwrec[3])
3286
+
3287
+
3288
+ ##
3289
+
3290
+
3291
+ @dc.dataclass(frozen=True)
3292
+ class User:
3293
+ name: str
3294
+ uid: Uid
3295
+ gid: Gid
3296
+
3297
+
3298
+ def get_user(name: str) -> User:
3299
+ return User(
3300
+ name=name,
3301
+ uid=(uid := name_to_uid(name)),
3302
+ gid=gid_for_uid(uid),
3303
+ )
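A hedged usage sketch for the helpers above (requires a real account on the host; 'root' maps to uid 0 / gid 0 on typical Unix systems):

    u = get_user('root')
    assert (u.uid, u.gid) == (0, 0)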
3304
+
3305
+
3306
+ ########################################
3307
+ # ../../../omlish/configs/formats.py
3308
+ """
3309
+ Notes:
3310
+ - necessarily string-oriented
3311
+ - single file, as this is intended to be amalg'd and thus all included anyway
3312
+
3313
+ TODO:
3314
+ - ConfigDataMapper? to_map -> ConfigMap?
3315
+ - nginx ?
3316
+ - raw ?
3317
+ """
3318
+
3319
+
3320
+ ##
3321
+
3322
+
3323
+ @dc.dataclass(frozen=True)
3324
+ class ConfigData(abc.ABC): # noqa
3325
+ @abc.abstractmethod
3326
+ def as_map(self) -> ConfigMap:
3327
+ raise NotImplementedError
3328
+
3329
+
3330
+ #
3331
+
3332
+
3333
+ class ConfigLoader(abc.ABC, ta.Generic[ConfigDataT]):
3334
+ @property
3335
+ def file_exts(self) -> ta.Sequence[str]:
3336
+ return ()
3337
+
3338
+ def match_file(self, n: str) -> bool:
3339
+ return '.' in n and n.split('.')[-1] in check.not_isinstance(self.file_exts, str)
3340
+
3341
+ #
3342
+
3343
+ def load_file(self, p: str) -> ConfigDataT:
3344
+ with open(p) as f:
3345
+ return self.load_str(f.read())
3346
+
3347
+ @abc.abstractmethod
3348
+ def load_str(self, s: str) -> ConfigDataT:
3349
+ raise NotImplementedError
3350
+
3351
+
3352
+ #
3353
+
3354
+
3355
+ class ConfigRenderer(abc.ABC, ta.Generic[ConfigDataT]):
3356
+ @property
3357
+ @abc.abstractmethod
3358
+ def data_cls(self) -> ta.Type[ConfigDataT]:
3359
+ raise NotImplementedError
3360
+
3361
+ def match_data(self, d: ConfigDataT) -> bool:
3362
+ return isinstance(d, self.data_cls)
3363
+
3364
+ #
3365
+
3366
+ @abc.abstractmethod
3367
+ def render(self, d: ConfigDataT) -> str:
3368
+ raise NotImplementedError
3369
+
3370
+
3371
+ ##
3372
+
3373
+
3374
+ @dc.dataclass(frozen=True)
3375
+ class ObjConfigData(ConfigData, abc.ABC):
3376
+ obj: ta.Any
3377
+
3378
+ def as_map(self) -> ConfigMap:
3379
+ return check.isinstance(self.obj, collections.abc.Mapping)
3380
+
3381
+
3382
+ ##
3383
+
3384
+
3385
+ @dc.dataclass(frozen=True)
3386
+ class JsonConfigData(ObjConfigData):
3387
+ pass
3388
+
3389
+
3390
+ class JsonConfigLoader(ConfigLoader[JsonConfigData]):
3391
+ file_exts = ('json',)
3392
+
3393
+ def load_str(self, s: str) -> JsonConfigData:
3394
+ return JsonConfigData(json.loads(s))
3395
+
3396
+
3397
+ class JsonConfigRenderer(ConfigRenderer[JsonConfigData]):
3398
+ data_cls = JsonConfigData
3399
+
3400
+ def render(self, d: JsonConfigData) -> str:
3401
+ return json_dumps_pretty(d.obj)
3402
+
3403
+
3404
+ ##
3405
+
3406
+
3407
+ @dc.dataclass(frozen=True)
3408
+ class TomlConfigData(ObjConfigData):
3409
+ pass
3410
+
3411
+
3412
+ class TomlConfigLoader(ConfigLoader[TomlConfigData]):
3413
+ file_exts = ('toml',)
3414
+
3415
+ def load_str(self, s: str) -> TomlConfigData:
3416
+ return TomlConfigData(toml_loads(s))
3417
+
3418
+
3419
+ class TomlConfigRenderer(ConfigRenderer[TomlConfigData]):
3420
+ data_cls = TomlConfigData
3421
+
3422
+ def render(self, d: TomlConfigData) -> str:
3423
+ return TomlWriter.write_str(d.obj)
3424
+
3425
+
3426
+ ##
3427
+
3428
+
3429
+ @dc.dataclass(frozen=True)
3430
+ class YamlConfigData(ObjConfigData):
3431
+ pass
3432
+
3433
+
3434
+ class YamlConfigLoader(ConfigLoader[YamlConfigData]):
3435
+ file_exts = ('yaml', 'yml')
3436
+
3437
+ def load_str(self, s: str) -> YamlConfigData:
3438
+ return YamlConfigData(__import__('yaml').safe_load(s))
3439
+
3440
+
3441
+ class YamlConfigRenderer(ConfigRenderer[YamlConfigData]):
3442
+ data_cls = YamlConfigData
3443
+
3444
+ def render(self, d: YamlConfigData) -> str:
3445
+ return __import__('yaml').safe_dump(d.obj)
3446
+
3447
+
3448
+ ##
3449
+
3450
+
3451
+ @dc.dataclass(frozen=True)
3452
+ class IniConfigData(ConfigData):
3453
+ sections: IniSectionSettingsMap
3454
+
3455
+ def as_map(self) -> ConfigMap:
3456
+ return self.sections
3457
+
3458
+
3459
+ class IniConfigLoader(ConfigLoader[IniConfigData]):
3460
+ file_exts = ('ini',)
3461
+
3462
+ def load_str(self, s: str) -> IniConfigData:
3463
+ cp = configparser.ConfigParser()
3464
+ cp.read_string(s)
3465
+ return IniConfigData(extract_ini_sections(cp))
3466
+
3467
+
3468
+ class IniConfigRenderer(ConfigRenderer[IniConfigData]):
3469
+ data_cls = IniConfigData
3470
+
3471
+ def render(self, d: IniConfigData) -> str:
3472
+ return render_ini_sections(d.sections)
3473
+
3474
+
3475
+ ##
3476
+
3477
+
3478
+ @dc.dataclass(frozen=True)
3479
+ class SwitchedConfigFileLoader:
3480
+ loaders: ta.Sequence[ConfigLoader]
3481
+ default: ta.Optional[ConfigLoader] = None
3482
+
3483
+ def load_file(self, p: str) -> ConfigData:
3484
+ n = os.path.basename(p)
3485
+
3486
+ for l in self.loaders:
3487
+ if l.match_file(n):
3488
+ return l.load_file(p)
3489
+
3490
+ if (d := self.default) is not None:
3491
+ return d.load_file(p)
3492
+
3493
+ raise NameError(n)
3494
+
3495
+
3496
+ DEFAULT_CONFIG_LOADERS: ta.Sequence[ConfigLoader] = [
3497
+ JsonConfigLoader(),
3498
+ TomlConfigLoader(),
3499
+ YamlConfigLoader(),
3500
+ IniConfigLoader(),
3501
+ ]
3502
+
3503
+ DEFAULT_CONFIG_LOADER: ConfigLoader = JsonConfigLoader()
3504
+
3505
+ DEFAULT_CONFIG_FILE_LOADER = SwitchedConfigFileLoader(
3506
+ loaders=DEFAULT_CONFIG_LOADERS,
3507
+ default=DEFAULT_CONFIG_LOADER,
3508
+ )
3509
+
3510
+
3511
+ ##
3512
+
3513
+
3514
+ @dc.dataclass(frozen=True)
3515
+ class SwitchedConfigRenderer:
3516
+ renderers: ta.Sequence[ConfigRenderer]
3517
+
3518
+ def render(self, d: ConfigData) -> str:
3519
+ for r in self.renderers:
3520
+ if r.match_data(d):
3521
+ return r.render(d)
3522
+ raise TypeError(d)
3523
+
3087
3524
 
3525
+ DEFAULT_CONFIG_RENDERERS: ta.Sequence[ConfigRenderer] = [
3526
+ JsonConfigRenderer(),
3527
+ TomlConfigRenderer(),
3528
+ YamlConfigRenderer(),
3529
+ IniConfigRenderer(),
3530
+ ]
3088
3531
 
3089
- def name_to_gid(name: str) -> Gid:
3090
- try:
3091
- gid = int(name)
3092
- except ValueError:
3093
- try:
3094
- grprec = grp.getgrnam(name)
3095
- except KeyError:
3096
- raise ValueError(f'Invalid group name {name}') # noqa
3097
- gid = grprec[2]
3098
- else:
3099
- try:
3100
- grp.getgrgid(gid) # check if gid is valid
3101
- except KeyError:
3102
- raise ValueError(f'Invalid group id {name}') # noqa
3103
- return Gid(gid)
3532
+ DEFAULT_CONFIG_RENDERER = SwitchedConfigRenderer(DEFAULT_CONFIG_RENDERERS)
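A hedged sketch of the switching behavior above, using a hypothetical path: the loader is chosen by file extension, JSON is the fallback, and the renderer is chosen by the data class.

    data = DEFAULT_CONFIG_FILE_LOADER.load_file('app.toml')  # -> TomlConfigData
    text = DEFAULT_CONFIG_RENDERER.render(data)              # re-rendered as TOML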
3104
3533
 
3105
3534
 
3106
- def gid_for_uid(uid: Uid) -> Gid:
3107
- pwrec = pwd.getpwuid(uid)
3108
- return Gid(pwrec[3])
3535
+ ########################################
3536
+ # ../../../omlish/configs/processing/names.py
3537
+ """
3538
+ usecase: supervisor process groups
3539
+ """
3109
3540
 
3110
3541
 
3111
3542
  ##
3112
3543
 
3113
3544
 
3114
- @dc.dataclass(frozen=True)
3115
- class User:
3116
- name: str
3117
- uid: Uid
3118
- gid: Gid
3545
+ def build_config_named_children(
3546
+ o: ta.Union[
3547
+ ta.Sequence[ConfigMap],
3548
+ ta.Mapping[str, ConfigMap],
3549
+ None,
3550
+ ],
3551
+ *,
3552
+ name_key: str = 'name',
3553
+ ) -> ta.Optional[ta.Sequence[ConfigMap]]:
3554
+ if o is None:
3555
+ return None
3119
3556
 
3557
+ lst: ta.List[ConfigMap] = []
3558
+ if isinstance(o, ta.Mapping):
3559
+ for k, v in o.items():
3560
+ check.isinstance(v, ta.Mapping)
3561
+ if name_key in v:
3562
+ n = v[name_key]
3563
+ if k != n:
3564
+ raise KeyError(f'Given names do not match: {n} != {k}')
3565
+ lst.append(v)
3566
+ else:
3567
+ lst.append({name_key: k, **v})
3120
3568
 
3121
- def get_user(name: str) -> User:
3122
- return User(
3123
- name=name,
3124
- uid=(uid := name_to_uid(name)),
3125
- gid=gid_for_uid(uid),
3126
- )
3569
+ else:
3570
+ check.not_isinstance(o, str)
3571
+ lst.extend(o)
3572
+
3573
+ seen = set()
3574
+ for d in lst:
3575
+ n = d[name_key]
3576
+ if n in seen:
3577
+ raise KeyError(f'Duplicate name: {n}')
3578
+ seen.add(n)
3579
+
3580
+ return lst
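A sketch of the normalization above: a mapping keyed by name becomes a sequence of maps with the name merged in.

    procs = build_config_named_children({'web': {'command': 'run-web'}})
    assert procs == [{'name': 'web', 'command': 'run-web'}]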
3127
3581
 
3128
3582
 
3129
3583
  ########################################
@@ -5858,119 +6312,303 @@ class SocketHandler(abc.ABC):
5858
6312
  raise NotImplementedError
5859
6313
 
5860
6314
 
5861
- ########################################
5862
- # ../../configs.py
6315
+ ########################################
6316
+ # ../configs.py
6317
+
6318
+
6319
+ ##
6320
+
6321
+
6322
+ class RestartWhenExitUnexpected:
6323
+ pass
6324
+
6325
+
6326
+ class RestartUnconditionally:
6327
+ pass
6328
+
6329
+
6330
+ ##
6331
+
6332
+
6333
+ @dc.dataclass(frozen=True)
6334
+ class ProcessConfig:
6335
+ # A Python string expression that is used to compose the supervisor process name for this process. You usually don't
6336
+ # need to worry about setting this unless you change numprocs. The string expression is evaluated against a
6337
+ # dictionary that includes group_name, host_node_name, process_num, program_name, and here (the directory of the
6338
+ # supervisord config file).
6339
+ name: str
6340
+
6341
+ # The command that will be run when this program is started. The command can be either absolute (e.g.
6342
+ # /path/to/programname) or relative (e.g. programname). If it is relative, the supervisord's environment $PATH will
6343
+ # be searched for the executable. Programs can accept arguments, e.g. /path/to/program foo bar. The command line can
6344
+ # use double quotes to group arguments with spaces in them to pass to the program, e.g. /path/to/program/name -p
6345
+ # "foo bar". Note that the value of command may include Python string expressions, e.g. /path/to/programname
6346
+ # --port=80%(process_num)02d might expand to /path/to/programname --port=8000 at runtime. String expressions are
6347
+ # evaluated against a dictionary containing the keys group_name, host_node_name, program_name, process_num,
6348
+ # numprocs, here (the directory of the supervisord config file), and all supervisord's environment variables
6349
+ # prefixed with ENV_. Controlled programs should themselves not be daemons, as supervisord assumes it is responsible
6350
+ # for daemonizing its subprocesses
6351
+ command: str
6352
+
6353
+ #
6354
+
6355
+ # Supervisor will start as many instances of this program as named by numprocs. Note that if numprocs > 1, the
6356
+ # process_name expression must include %(process_num)s (or any other valid Python string expression that includes
6357
+ # process_num) within it.
6358
+ num_procs: int = 1
6359
+
6360
+ # An integer offset that is used to compute the number at which process_num starts.
6361
+ num_procs_start: int = 0
6362
+
6363
+ #
6364
+
6365
+ # Instruct supervisord to use this UNIX user account as the account which runs the program. The user can only be
6366
+ # switched if supervisord is run as the root user. If supervisord can't switch to the specified user, the program
6367
+ # will not be started.
6368
+ #
6369
+ # Note: The user will be changed using setuid only. This does not start a login shell and does not change
6370
+ # environment variables like USER or HOME
6371
+ user: ta.Optional[str] = None
6372
+ uid: ta.Optional[int] = None
6373
+
6374
+ # An octal number (e.g. 002, 022) representing the umask of the process.
6375
+ umask: ta.Optional[int] = None
6376
+
6377
+ #
6378
+
6379
+ # A file path representing a directory to which supervisord should temporarily chdir before exec'ing the child.
6380
+ directory: ta.Optional[str] = None
6381
+
6382
+ # A list of key/value pairs in the form KEY="val",KEY2="val2" that will be placed in the child process' environment.
6383
+ # The environment string may contain Python string expressions that will be evaluated against a dictionary
6384
+ # containing group_name, host_node_name, process_num, program_name, and here (the directory of the supervisord
6385
+ # config file). Values containing non-alphanumeric characters should be quoted (e.g. KEY="val:123",KEY2="val,456").
6386
+ # Otherwise, quoting the values is optional but recommended. Note that the subprocess will inherit the environment
6387
+ # variables of the shell used to start “supervisord” except for the ones overridden here.
6388
+ environment: ta.Optional[ta.Mapping[str, str]] = None
6389
+
6390
+ #
6391
+
6392
+ # The relative priority of the program in the start and shutdown ordering. Lower priorities indicate programs that
6393
+ # start first and shut down last at startup and when aggregate commands are used in various clients (e.g. “start
6394
+ # all”/”stop all”). Higher priorities indicate programs that start last and shut down first.
6395
+ priority: int = 999
6396
+
6397
+ # If true, this program will start automatically when supervisord is started.
6398
+ auto_start: bool = True
6399
+
6400
+ # Specifies if supervisord should automatically restart a process if it exits when it is in the RUNNING state. May
6401
+ # be one of false, unexpected, or true. If false, the process will not be autorestarted. If unexpected, the process
6402
+ # will be restarted when the program exits with an exit code that is not one of the exit codes associated with this
6403
+ # process' configuration (see exitcodes). If true, the process will be unconditionally restarted when it exits,
6404
+ # without regard to its exit code.
6405
+ #
6406
+ # Note: autorestart controls whether supervisord will autorestart a program if it exits after it has successfully
6407
+ # started up (the process is in the RUNNING state). supervisord has a different restart mechanism for when the
6408
+ # process is starting up (the process is in the STARTING state). Retries during process startup are controlled by
6409
+ # startsecs and startretries.
6410
+ auto_restart: str = 'unexpected'
6411
+
6412
+ # The total number of seconds which the program needs to stay running after a startup to consider the start
6413
+ # successful (moving the process from the STARTING state to the RUNNING state). Set to 0 to indicate that the
6414
+ # program needn't stay running for any particular amount of time.
6415
+ #
6416
+ # Note: Even if a process exits with an “expected” exit code (see exitcodes), the start will still be considered a
6417
+ # failure if the process exits quicker than startsecs.
6418
+ start_secs: int = 1
6419
+
6420
+ # The number of serial failure attempts that supervisord will allow when attempting to start the program before
6421
+ # giving up and putting the process into an FATAL state.
6422
+ #
6423
+ # Note: After each failed restart, process will be put in BACKOFF state and each retry attempt will take
6424
+ # increasingly more time.
6425
+ start_retries: int = 3
6426
+
6427
+ # The signal used to kill the program when a stop is requested. This can be specified using the signal's name or its
6428
+ # number. It is normally one of: TERM, HUP, INT, QUIT, KILL, USR1, or USR2.
6429
+ stop_signal: int = signal.SIGTERM
6430
+
6431
+ # The number of seconds to wait for the OS to return a SIGCHLD to supervisord after the program has been sent a
6432
+ # stopsignal. If this number of seconds elapses before supervisord receives a SIGCHLD from the process, supervisord
6433
+ # will attempt to kill it with a final SIGKILL.
6434
+ stop_wait_secs: int = 10
6435
+
6436
+ # If true, the flag causes supervisor to send the stop signal to the whole process group and implies killasgroup is
6437
+ # true. This is useful for programs, such as Flask in debug mode, that do not propagate stop signals to their
6438
+ # children, leaving them orphaned.
6439
+ stop_as_group: bool = False
6440
+
6441
+ # If true, when resorting to send SIGKILL to the program to terminate it send it to its whole process group instead,
6442
+ # taking care of its children as well, useful e.g with Python programs using multiprocessing.
6443
+ kill_as_group: bool = False
6444
+
6445
+ # The list of “expected” exit codes for this program used with autorestart. If the autorestart parameter is set to
6446
+ # unexpected, and the process exits in any other way than as a result of a supervisor stop request, supervisord will
6447
+ # restart the process if it exits with an exit code that is not defined in this list.
6448
+ #
6449
+ # Note: In Supervisor versions prior to 4.0, the default was 0,2. In Supervisor 4.0, the default was changed to 0.
6450
+ exitcodes: ta.Sequence[int] = (0,)
6451
+
6452
+ #
6453
+
6454
+ @dc.dataclass(frozen=True)
6455
+ class Log:
6456
+ file: ta.Optional[str] = None
6457
+ capture_max_bytes: ta.Optional[int] = None
6458
+ events_enabled: bool = False
6459
+ syslog: bool = False
6460
+ backups: ta.Optional[int] = None
6461
+ max_bytes: ta.Optional[int] = None
6462
+
6463
+ stdout: Log = Log()
6464
+ stderr: Log = Log()
6465
+
6466
+ # If true, cause the process' stderr output to be sent back to supervisord on its stdout file descriptor (in UNIX
6467
+ # shell terms, this is the equivalent of executing /the/program 2>&1).
6468
+ #
6469
+ # Note: Do not set redirect_stderr=true in an [eventlistener:x] section. Eventlisteners use stdout and stdin to
6470
+ # communicate with supervisord. If stderr is redirected, output from stderr will interfere with the eventlistener
6471
+ # protocol.
6472
+ redirect_stderr: bool = False
6473
+
6474
+
6475
+ @dc.dataclass(frozen=True)
6476
+ class ProcessGroupConfig:
6477
+ name: str
6478
+
6479
+ priority: int = 999
6480
+
6481
+ processes: ta.Optional[ta.Sequence[ProcessConfig]] = None
6482
+
6483
+
6484
+ @dc.dataclass(frozen=True)
6485
+ class ServerConfig:
6486
+ # Instruct supervisord to switch users to this UNIX user account before doing any meaningful processing. The user
6487
+ # can only be switched if supervisord is started as the root user.
6488
+ user: ta.Optional[str] = None
6489
+
6490
+ # If true, supervisord will start in the foreground instead of daemonizing.
6491
+ nodaemon: bool = False
6492
+
6493
+ # The umask of the supervisord process.
6494
+ umask: int = 0o22
6495
+
6496
+ #
5863
6497
 
6498
+ # When supervisord daemonizes, switch to this directory. This option can include the value %(here)s, which expands
6499
+ # to the directory in which the supervisord configuration file was found.
6500
+ directory: ta.Optional[str] = None
5864
6501
 
5865
- ##
6502
+ # The location in which supervisord keeps its pid file. This option can include the value %(here)s, which expands to
6503
+ # the directory in which the supervisord configuration file was found.
6504
+ pidfile: str = 'supervisord.pid'
5866
6505
 
6506
+ # The identifier string for this supervisor process, used by the RPC interface.
6507
+ identifier: str = 'supervisor'
5867
6508
 
5868
- def parse_config_file(
5869
- name: str,
5870
- f: ta.TextIO,
5871
- ) -> ConfigMapping:
5872
- if name.endswith('.toml'):
5873
- return toml_loads(f.read())
6509
+ # The minimum number of file descriptors that must be available before supervisord will start successfully.
6510
+ min_fds: int = 1024
6511
+ # The minimum number of process descriptors that must be available before supervisord will start successfully.
6512
+ min_procs: int = 200
5874
6513
 
5875
- elif any(name.endswith(e) for e in ('.yml', '.yaml')):
5876
- yaml = __import__('yaml')
5877
- return yaml.safe_load(f)
6514
+ # Prevent supervisord from clearing any existing AUTO child log files at startup time. Useful for debugging
6515
+ nocleanup: bool = False
5878
6516
 
5879
- elif name.endswith('.ini'):
5880
- import configparser
5881
- cp = configparser.ConfigParser()
5882
- cp.read_file(f)
5883
- config_dct: ta.Dict[str, ta.Any] = {}
5884
- for sec in cp.sections():
5885
- cd = config_dct
5886
- for k in sec.split('.'):
5887
- cd = cd.setdefault(k, {})
5888
- cd.update(cp.items(sec))
5889
- return config_dct
6517
+ # Strip all ANSI escape sequences from child log files.
6518
+ strip_ansi: bool = False
5890
6519
 
5891
- else:
5892
- return json.loads(f.read())
6520
+ #
5893
6521
 
6522
+ # The path to the activity log of the supervisord process. This option can include the value %(here)s, which expands
6523
+ # to the directory in which the supervisord configuration file was found.
6524
+ logfile: str = 'supervisord.log'
5894
6525
 
5895
- def read_config_file(
5896
- path: str,
5897
- cls: ta.Type[T],
5898
- *,
5899
- prepare: ta.Optional[ta.Callable[[ConfigMapping], ConfigMapping]] = None,
5900
- msh: ObjMarshalerManager = OBJ_MARSHALER_MANAGER,
5901
- ) -> T:
5902
- with open(path) as cf:
5903
- config_dct = parse_config_file(os.path.basename(path), cf)
6526
+ # The maximum number of bytes that may be consumed by the activity log file before it is rotated (suffix multipliers
6527
+ # like “KB”, “MB”, and “GB” can be used in the value). Set this value to 0 to indicate an unlimited log size.
6528
+ logfile_max_bytes: int = 50 * 1024 * 1024
5904
6529
 
5905
- if prepare is not None:
5906
- config_dct = prepare(config_dct)
6530
+ # The number of backups to keep around resulting from activity log file rotation. If set to 0, no backups will be
6531
+ # kept.
6532
+ logfile_backups: int = 10
5907
6533
 
5908
- return msh.unmarshal_obj(config_dct, cls)
6534
+ # The logging level, dictating what is written to the supervisord activity log. One of critical, error, warn, info,
6535
+ # debug, trace, or blather. Note that at log level debug, the supervisord log file will record the stderr/stdout
6536
+ # output of its child processes and extended info about process state changes, which is useful for debugging a
6537
+ # process which isn't starting properly.
6538
+ loglevel: int = logging.INFO
5909
6539
 
6540
+ # The directory used for AUTO child log files. This option can include the value %(here)s, which expands to the
6541
+ # directory in which the supervisord configuration file was found.
6542
+ child_logdir: str = '/dev/null'
5910
6543
 
5911
- ##
6544
+ # If true and not daemonized, logs will not be directed to stdout.
6545
+ silent: bool = False
5912
6546
 
6547
+ #
5913
6548
 
5914
- def build_config_named_children(
5915
- o: ta.Union[
5916
- ta.Sequence[ConfigMapping],
5917
- ta.Mapping[str, ConfigMapping],
5918
- None,
5919
- ],
5920
- *,
5921
- name_key: str = 'name',
5922
- ) -> ta.Optional[ta.Sequence[ConfigMapping]]:
5923
- if o is None:
5924
- return None
6549
+ groups: ta.Optional[ta.Sequence[ProcessGroupConfig]] = None
5925
6550
 
5926
- lst: ta.List[ConfigMapping] = []
5927
- if isinstance(o, ta.Mapping):
5928
- for k, v in o.items():
5929
- check.isinstance(v, ta.Mapping)
5930
- if name_key in v:
5931
- n = v[name_key]
5932
- if k != n:
5933
- raise KeyError(f'Given names do not match: {n} != {k}')
5934
- lst.append(v)
5935
- else:
5936
- lst.append({name_key: k, **v})
6551
+ # TODO: implement - make sure to accept broken symlinks
6552
+ group_config_dirs: ta.Optional[ta.Sequence[str]] = None
5937
6553
 
5938
- else:
5939
- check.not_isinstance(o, str)
5940
- lst.extend(o)
6554
+ #
5941
6555
 
5942
- seen = set()
5943
- for d in lst:
5944
- n = d['name']
5945
- if n in d:
5946
- raise KeyError(f'Duplicate name: {n}')
5947
- seen.add(n)
6556
+ http_port: ta.Optional[int] = None
5948
6557
 
5949
- return lst
6558
+ #
6559
+
6560
+ @classmethod
6561
+ def new(
6562
+ cls,
6563
+ *,
6564
+ umask: ta.Union[int, str] = 0o22,
6565
+ directory: ta.Optional[str] = None,
6566
+ logfile: str = 'supervisord.log',
6567
+ logfile_max_bytes: ta.Union[int, str] = 50 * 1024 * 1024,
6568
+ loglevel: ta.Union[int, str] = logging.INFO,
6569
+ pidfile: str = 'supervisord.pid',
6570
+ child_logdir: ta.Optional[str] = None,
6571
+ **kwargs: ta.Any,
6572
+ ) -> 'ServerConfig':
6573
+ return cls(
6574
+ umask=parse_octal(umask),
6575
+ directory=check_existing_dir(directory) if directory is not None else None,
6576
+ logfile=check_path_with_existing_dir(logfile),
6577
+ logfile_max_bytes=parse_bytes_size(logfile_max_bytes),
6578
+ loglevel=parse_logging_level(loglevel),
6579
+ pidfile=check_path_with_existing_dir(pidfile),
6580
+ child_logdir=child_logdir if child_logdir else tempfile.gettempdir(),
6581
+ **kwargs,
6582
+ )
5950
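For orientation, a minimal usage sketch of ServerConfig.new() (editor's addition, not part of the diff): string-friendly inputs are normalized by the parse_*/check_* helpers it calls; the '50MB' suffix form is an assumption about parse_bytes_size:

cfg = ServerConfig.new(
    umask='022',                   # assumed to be parsed as octal -> 0o22
    logfile='supervisord.log',     # path validated by check_path_with_existing_dir
    logfile_max_bytes='50MB',      # assumed suffix form accepted by parse_bytes_size
    loglevel='info',               # resolved by parse_logging_level -> logging.INFO
    pidfile='supervisord.pid',
)
# child_logdir falls back to tempfile.gettempdir() when not given.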
6583
 
5951
6584
 
5952
6585
  ##
5953
6586
 
5954
6587
 
5955
- def render_ini_config(
5956
- settings_by_section: IniConfigSectionSettingsMap,
5957
- ) -> str:
5958
- out = io.StringIO()
6588
+ def prepare_process_group_config(dct: ConfigMap) -> ConfigMap:
6589
+ out = dict(dct)
6590
+ out['processes'] = build_config_named_children(out.get('processes'))
6591
+ return out
5959
6592
 
5960
- for i, (section, settings) in enumerate(settings_by_section.items()):
5961
- if i:
5962
- out.write('\n')
5963
6593
 
5964
- out.write(f'[{section}]\n')
6594
+ def prepare_server_config(dct: ta.Mapping[str, ta.Any]) -> ta.Mapping[str, ta.Any]:
6595
+ out = dict(dct)
6596
+ group_dcts = build_config_named_children(out.get('groups'))
6597
+ out['groups'] = [prepare_process_group_config(group_dct) for group_dct in group_dcts or []]
6598
+ return out
5965
6599
 
5966
- for k, v in settings.items():
5967
- if isinstance(v, str):
5968
- out.write(f'{k}={v}\n')
5969
- else:
5970
- for vv in v:
5971
- out.write(f'{k}={vv}\n')
5972
6600
 
5973
- return out.getvalue()
6601
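A small illustration (editor's sketch) of the normalization the prepare_* helpers perform: name-keyed mappings of groups and processes are flattened by build_config_named_children() into lists of dicts carrying an explicit 'name' key:

raw = {
    'groups': {
        'web': {'processes': {'nginx': {'command': '/usr/sbin/nginx'}}},
    },
}
prepared = prepare_server_config(raw)
# prepared['groups'] == [{'name': 'web',
#                         'processes': [{'name': 'nginx', 'command': '/usr/sbin/nginx'}]}]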
+ ##
6602
+
6603
+
6604
+ def parse_logging_level(value: ta.Union[str, int]) -> int:
6605
+ if isinstance(value, int):
6606
+ return value
6607
+ s = str(value).lower()
6608
+ level = logging.getLevelNamesMapping().get(s.upper())
6609
+ if level is None:
6610
+ raise ValueError(f'bad logging level name {value!r}')
6611
+ return level
5974
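Behavior sketch for parse_logging_level (editor's note): integers pass through unchanged and names resolve case-insensitively via logging.getLevelNamesMapping(), which exists only on Python 3.11+; supervisord-style extras such as 'trace' or 'blather' are not in the stdlib mapping and would raise:

assert parse_logging_level(logging.DEBUG) == logging.DEBUG   # ints pass through
assert parse_logging_level('info') == logging.INFO           # names are case-insensitive
# parse_logging_level('blather')  -> ValueError: bad logging level name 'blather'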
6612
 
5975
6613
 
5976
6614
  ########################################
@@ -6120,6 +6758,38 @@ class UnsupportedMethodHttpHandlerError(Exception):
6120
6758
  pass
6121
6759
 
6122
6760
 
6761
+ ########################################
6762
+ # ../../../omlish/lite/configs.py
6763
+
6764
+
6765
+ ##
6766
+
6767
+
6768
+ def load_config_file_obj(
6769
+ f: str,
6770
+ cls: ta.Type[T],
6771
+ *,
6772
+ prepare: ta.Union[
6773
+ ta.Callable[[ConfigMap], ConfigMap],
6774
+ ta.Iterable[ta.Callable[[ConfigMap], ConfigMap]],
6775
+ ] = (),
6776
+ msh: ObjMarshalerManager = OBJ_MARSHALER_MANAGER,
6777
+ ) -> T:
6778
+ config_data = DEFAULT_CONFIG_FILE_LOADER.load_file(f)
6779
+
6780
+ config_dct = config_data.as_map()
6781
+
6782
+ if prepare is not None:
6783
+ if isinstance(prepare, ta.Iterable):
6784
+ pfs = list(prepare)
6785
+ else:
6786
+ pfs = [prepare]
6787
+ for pf in pfs:
6788
+ config_dct = pf(config_dct)
6789
+
6790
+ return msh.unmarshal_obj(config_dct, cls)
6791
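Usage sketch (editor's addition mirroring the call in main() further below; the file name is illustrative): load a typed config object from a config file, applying prepare hooks before unmarshalling:

server_config = load_config_file_obj(
    'supervisor.toml',               # format assumed to be chosen by DEFAULT_CONFIG_FILE_LOADER from the extension
    ServerConfig,
    prepare=prepare_server_config,   # a single callable or an iterable of callables
)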
+
6792
+
6123
6793
  ########################################
6124
6794
  # ../../../omlish/logs/standard.py
6125
6795
  """
@@ -6242,303 +6912,184 @@ def configure_standard_logging(
6242
6912
  return StandardConfiguredLogHandler(handler)
6243
6913
 
6244
6914
 
6245
- ########################################
6246
- # ../configs.py
6247
-
6248
-
6249
- ##
6250
-
6251
-
6252
- class RestartWhenExitUnexpected:
6253
- pass
6254
-
6255
-
6256
- class RestartUnconditionally:
6257
- pass
6258
-
6259
-
6260
- ##
6261
-
6262
-
6263
- @dc.dataclass(frozen=True)
6264
- class ProcessConfig:
6265
- # A Python string expression that is used to compose the supervisor process name for this process. You usually don't
6266
- # need to worry about setting this unless you change numprocs. The string expression is evaluated against a
6267
- # dictionary that includes group_name, host_node_name, process_num, program_name, and here (the directory of the
6268
- # supervisord config file).
6269
- name: str
6270
-
6271
- # The command that will be run when this program is started. The command can be either absolute (e.g.
6272
- # /path/to/programname) or relative (e.g. programname). If it is relative, the supervisord's environment $PATH will
6273
- # be searched for the executable. Programs can accept arguments, e.g. /path/to/program foo bar. The command line can
6274
- # use double quotes to group arguments with spaces in them to pass to the program, e.g. /path/to/program/name -p
6275
- # "foo bar". Note that the value of command may include Python string expressions, e.g. /path/to/programname
6276
- # --port=80%(process_num)02d might expand to /path/to/programname --port=8000 at runtime. String expressions are
6277
- # evaluated against a dictionary containing the keys group_name, host_node_name, program_name, process_num,
6278
- # numprocs, here (the directory of the supervisord config file), and all supervisord's environment variables
6279
- # prefixed with ENV_. Controlled programs should themselves not be daemons, as supervisord assumes it is responsible
6280
- # for daemonizing its subprocesses
6281
- command: str
6282
-
6283
- #
6284
-
6285
- # Supervisor will start as many instances of this program as named by numprocs. Note that if numprocs > 1, the
6286
- # process_name expression must include %(process_num)s (or any other valid Python string expression that includes
6287
- # process_num) within it.
6288
- num_procs: int = 1
6289
-
6290
- # An integer offset that is used to compute the number at which process_num starts.
6291
- num_procs_start: int = 0
6292
-
6293
- #
6294
-
6295
- # Instruct supervisord to use this UNIX user account as the account which runs the program. The user can only be
6296
- # switched if supervisord is run as the root user. If supervisord can't switch to the specified user, the program
6297
- # will not be started.
6298
- #
6299
- # Note: The user will be changed using setuid only. This does not start a login shell and does not change
6300
- # environment variables like USER or HOME
6301
- user: ta.Optional[str] = None
6302
- uid: ta.Optional[int] = None
6303
-
6304
- # An octal number (e.g. 002, 022) representing the umask of the process.
6305
- umask: ta.Optional[int] = None
6306
-
6307
- #
6308
-
6309
- # A file path representing a directory to which supervisord should temporarily chdir before exec'ing the child.
6310
- directory: ta.Optional[str] = None
6311
-
6312
- # A list of key/value pairs in the form KEY="val",KEY2="val2" that will be placed in the child process' environment.
6313
- # The environment string may contain Python string expressions that will be evaluated against a dictionary
6314
- # containing group_name, host_node_name, process_num, program_name, and here (the directory of the supervisord
6315
- # config file). Values containing non-alphanumeric characters should be quoted (e.g. KEY="val:123",KEY2="val,456").
6316
- # Otherwise, quoting the values is optional but recommended. Note that the subprocess will inherit the environment
6317
- # variables of the shell used to start “supervisord” except for the ones overridden here.
6318
- environment: ta.Optional[ta.Mapping[str, str]] = None
6319
-
6320
- #
6321
-
6322
- # The relative priority of the program in the start and shutdown ordering. Lower priorities indicate programs that
6323
- # start first and shut down last at startup and when aggregate commands are used in various clients (e.g. “start
6324
- # all”/”stop all”). Higher priorities indicate programs that start last and shut down first.
6325
- priority: int = 999
6326
-
6327
- # If true, this program will start automatically when supervisord is started.
6328
- auto_start: bool = True
6329
-
6330
- # Specifies if supervisord should automatically restart a process if it exits when it is in the RUNNING state. May
6331
- # be one of false, unexpected, or true. If false, the process will not be autorestarted. If unexpected, the process
6332
- # will be restarted when the program exits with an exit code that is not one of the exit codes associated with this
6333
- # process' configuration (see exitcodes). If true, the process will be unconditionally restarted when it exits,
6334
- # without regard to its exit code.
6335
- #
6336
- # Note: autorestart controls whether supervisord will autorestart a program if it exits after it has successfully
6337
- # started up (the process is in the RUNNING state). supervisord has a different restart mechanism for when the
6338
- # process is starting up (the process is in the STARTING state). Retries during process startup are controlled by
6339
- # startsecs and startretries.
6340
- auto_restart: str = 'unexpected'
6915
+ ########################################
6916
+ # ../types.py
6341
6917
 
6342
- # The total number of seconds which the program needs to stay running after a startup to consider the start
6343
- # successful (moving the process from the STARTING state to the RUNNING state). Set to 0 to indicate that the
6344
- # program needn't stay running for any particular amount of time.
6345
- #
6346
- # Note: Even if a process exits with an “expected” exit code (see exitcodes), the start will still be considered a
6347
- # failure if the process exits quicker than startsecs.
6348
- start_secs: int = 1
6349
6918
 
6350
- # The number of serial failure attempts that supervisord will allow when attempting to start the program before
6351
- # giving up and putting the process into an FATAL state.
6352
- #
6353
- # Note: After each failed restart, process will be put in BACKOFF state and each retry attempt will take
6354
- # increasingly more time.
6355
- start_retries: int = 3
6919
+ ##
6356
6920
 
6357
- # The signal used to kill the program when a stop is requested. This can be specified using the signal's name or its
6358
- # number. It is normally one of: TERM, HUP, INT, QUIT, KILL, USR1, or USR2.
6359
- stop_signal: int = signal.SIGTERM
6360
6921
 
6361
- # The number of seconds to wait for the OS to return a SIGCHLD to supervisord after the program has been sent a
6362
- # stopsignal. If this number of seconds elapses before supervisord receives a SIGCHLD from the process, supervisord
6363
- # will attempt to kill it with a final SIGKILL.
6364
- stop_wait_secs: int = 10
6922
+ class ExitNow(Exception): # noqa
6923
+ pass
6365
6924
 
6366
- # If true, the flag causes supervisor to send the stop signal to the whole process group and implies killasgroup is
6367
- # true. This is useful for programs, such as Flask in debug mode, that do not propagate stop signals to their
6368
- # children, leaving them orphaned.
6369
- stop_as_group: bool = False
6370
6925
 
6371
- # If true, when resorting to send SIGKILL to the program to terminate it send it to its whole process group instead,
6372
- # taking care of its children as well, useful e.g with Python programs using multiprocessing.
6373
- kill_as_group: bool = False
6926
+ ServerEpoch = ta.NewType('ServerEpoch', int)
6374
6927
 
6375
- # The list of “expected” exit codes for this program used with autorestart. If the autorestart parameter is set to
6376
- # unexpected, and the process exits in any other way than as a result of a supervisor stop request, supervisord will
6377
- # restart the process if it exits with an exit code that is not defined in this list.
6378
- #
6379
- # Note: In Supervisor versions prior to 4.0, the default was 0,2. In Supervisor 4.0, the default was changed to 0.
6380
- exitcodes: ta.Sequence[int] = (0,)
6381
6928
 
6382
- #
6929
+ ##
6383
6930
 
6384
- @dc.dataclass(frozen=True)
6385
- class Log:
6386
- file: ta.Optional[str] = None
6387
- capture_max_bytes: ta.Optional[int] = None
6388
- events_enabled: bool = False
6389
- syslog: bool = False
6390
- backups: ta.Optional[int] = None
6391
- max_bytes: ta.Optional[int] = None
6392
6931
 
6393
- stdout: Log = Log()
6394
- stderr: Log = Log()
6932
+ @functools.total_ordering
6933
+ class ConfigPriorityOrdered(abc.ABC):
6934
+ @property
6935
+ @abc.abstractmethod
6936
+ def config(self) -> ta.Any:
6937
+ raise NotImplementedError
6395
6938
 
6396
- # If true, cause the process' stderr output to be sent back to supervisord on its stdout file descriptor (in UNIX
6397
- # shell terms, this is the equivalent of executing /the/program 2>&1).
6398
- #
6399
- # Note: Do not set redirect_stderr=true in an [eventlistener:x] section. Eventlisteners use stdout and stdin to
6400
- # communicate with supervisord. If stderr is redirected, output from stderr will interfere with the eventlistener
6401
- # protocol.
6402
- redirect_stderr: bool = False
6939
+ def __lt__(self, other):
6940
+ return self.config.priority < other.config.priority
6403
6941
 
6942
+ def __eq__(self, other):
6943
+ return self.config.priority == other.config.priority
6404
6944
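Editor's illustration of the ordering contract: any subclass exposing a .config with a numeric .priority becomes sortable, which is what start/stop ordering builds on. The _Demo class below is hypothetical:

class _Demo(ConfigPriorityOrdered):
    def __init__(self, cfg):
        self._cfg = cfg

    @property
    def config(self):
        return self._cfg

lo, hi = _Demo(ProcessGroupConfig('a', priority=1)), _Demo(ProcessGroupConfig('b', priority=999))
assert lo < hi and sorted([hi, lo])[0] is lo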
 
6405
- @dc.dataclass(frozen=True)
6406
- class ProcessGroupConfig:
6407
- name: str
6408
6945
 
6409
- priority: int = 999
6946
+ ##
6410
6947
 
6411
- processes: ta.Optional[ta.Sequence[ProcessConfig]] = None
6412
6948
 
6949
+ class SupervisorStateManager(abc.ABC):
6950
+ @property
6951
+ @abc.abstractmethod
6952
+ def state(self) -> SupervisorState:
6953
+ raise NotImplementedError
6413
6954
 
6414
- @dc.dataclass(frozen=True)
6415
- class ServerConfig:
6416
- # Instruct supervisord to switch users to this UNIX user account before doing any meaningful processing. The user
6417
- # can only be switched if supervisord is started as the root user.
6418
- user: ta.Optional[str] = None
6955
+ @abc.abstractmethod
6956
+ def set_state(self, state: SupervisorState) -> None:
6957
+ raise NotImplementedError
6419
6958
 
6420
- # If true, supervisord will start in the foreground instead of daemonizing.
6421
- nodaemon: bool = False
6422
6959
 
6423
- # The umask of the supervisord process.
6424
- umask: int = 0o22
6960
+ ##
6425
6961
 
6426
- #
6427
6962
 
6428
- # When supervisord daemonizes, switch to this directory. This option can include the value %(here)s, which expands
6429
- # to the directory in which the supervisord configuration file was found.
6430
- directory: ta.Optional[str] = None
6963
+ class HasDispatchers(abc.ABC):
6964
+ @abc.abstractmethod
6965
+ def get_dispatchers(self) -> 'Dispatchers':
6966
+ raise NotImplementedError
6431
6967
 
6432
- # The location in which supervisord keeps its pid file. This option can include the value %(here)s, which expands to
6433
- # the directory in which the supervisord configuration file was found.
6434
- pidfile: str = 'supervisord.pid'
6435
6968
 
6436
- # The identifier string for this supervisor process, used by the RPC interface.
6437
- identifier: str = 'supervisor'
6969
+ class ProcessDispatcher(FdioHandler, abc.ABC):
6970
+ @property
6971
+ @abc.abstractmethod
6972
+ def channel(self) -> ProcessOutputChannel:
6973
+ raise NotImplementedError
6438
6974
 
6439
- # The minimum number of file descriptors that must be available before supervisord will start successfully.
6440
- min_fds: int = 1024
6441
- # The minimum number of process descriptors that must be available before supervisord will start successfully.
6442
- min_procs: int = 200
6975
+ @property
6976
+ @abc.abstractmethod
6977
+ def process(self) -> 'Process':
6978
+ raise NotImplementedError
6443
6979
 
6444
- # Prevent supervisord from clearing any existing AUTO child log files at startup time. Useful for debugging
6445
- nocleanup: bool = False
6446
6980
 
6447
- # Strip all ANSI escape sequences from child log files.
6448
- strip_ansi: bool = False
6981
+ class ProcessOutputDispatcher(ProcessDispatcher, abc.ABC):
6982
+ @abc.abstractmethod
6983
+ def remove_logs(self) -> None:
6984
+ raise NotImplementedError
6449
6985
 
6450
- #
6986
+ @abc.abstractmethod
6987
+ def reopen_logs(self) -> None:
6988
+ raise NotImplementedError
6451
6989
 
6452
- # The path to the activity log of the supervisord process. This option can include the value %(here)s, which expands
6453
- # to the directory in which the supervisord configuration file was found.
6454
- logfile: str = 'supervisord.log'
6455
6990
 
6456
- # The maximum number of bytes that may be consumed by the activity log file before it is rotated (suffix multipliers
6457
- # like “KB”, “MB”, and “GB” can be used in the value). Set this value to 0 to indicate an unlimited log size.
6458
- logfile_max_bytes: int = 50 * 1024 * 1024
6991
+ class ProcessInputDispatcher(ProcessDispatcher, abc.ABC):
6992
+ @abc.abstractmethod
6993
+ def write(self, chars: ta.Union[bytes, str]) -> None:
6994
+ raise NotImplementedError
6459
6995
 
6460
- # The number of backups to keep around resulting from activity log file rotation. If set to 0, no backups will be
6461
- # kept.
6462
- logfile_backups: int = 10
6996
+ @abc.abstractmethod
6997
+ def flush(self) -> None:
6998
+ raise NotImplementedError
6463
6999
 
6464
- # The logging level, dictating what is written to the supervisord activity log. One of critical, error, warn, info,
6465
- # debug, trace, or blather. Note that at log level debug, the supervisord log file will record the stderr/stdout
6466
- # output of its child processes and extended info about process state changes, which is useful for debugging a
6467
- # process which isn't starting properly.
6468
- loglevel: int = logging.INFO
6469
7000
 
6470
- # The directory used for AUTO child log files. This option can include the value %(here)s, which expands to the
6471
- # directory in which the supervisord configuration file was found.
6472
- child_logdir: str = '/dev/null'
7001
+ ##
6473
7002
 
6474
- # If true and not daemonized, logs will not be directed to stdout.
6475
- silent: bool = False
6476
7003
 
6477
- #
7004
+ class Process(
7005
+ ConfigPriorityOrdered,
7006
+ HasDispatchers,
7007
+ abc.ABC,
7008
+ ):
7009
+ @property
7010
+ @abc.abstractmethod
7011
+ def name(self) -> str:
7012
+ raise NotImplementedError
6478
7013
 
6479
- groups: ta.Optional[ta.Sequence[ProcessGroupConfig]] = None
7014
+ @property
7015
+ @abc.abstractmethod
7016
+ def config(self) -> ProcessConfig:
7017
+ raise NotImplementedError
6480
7018
 
6481
- # TODO: implement - make sure to accept broken symlinks
6482
- group_config_dirs: ta.Optional[ta.Sequence[str]] = None
7019
+ @property
7020
+ @abc.abstractmethod
7021
+ def group(self) -> 'ProcessGroup':
7022
+ raise NotImplementedError
7023
+
7024
+ @property
7025
+ @abc.abstractmethod
7026
+ def pid(self) -> Pid:
7027
+ raise NotImplementedError
6483
7028
 
6484
7029
  #
6485
7030
 
6486
- http_port: ta.Optional[int] = None
7031
+ @abc.abstractmethod
7032
+ def finish(self, sts: Rc) -> None:
7033
+ raise NotImplementedError
6487
7034
 
6488
- #
7035
+ @abc.abstractmethod
7036
+ def stop(self) -> ta.Optional[str]:
7037
+ raise NotImplementedError
6489
7038
 
6490
- @classmethod
6491
- def new(
6492
- cls,
6493
- *,
6494
- umask: ta.Union[int, str] = 0o22,
6495
- directory: ta.Optional[str] = None,
6496
- logfile: str = 'supervisord.log',
6497
- logfile_max_bytes: ta.Union[int, str] = 50 * 1024 * 1024,
6498
- loglevel: ta.Union[int, str] = logging.INFO,
6499
- pidfile: str = 'supervisord.pid',
6500
- child_logdir: ta.Optional[str] = None,
6501
- **kwargs: ta.Any,
6502
- ) -> 'ServerConfig':
6503
- return cls(
6504
- umask=parse_octal(umask),
6505
- directory=check_existing_dir(directory) if directory is not None else None,
6506
- logfile=check_path_with_existing_dir(logfile),
6507
- logfile_max_bytes=parse_bytes_size(logfile_max_bytes),
6508
- loglevel=parse_logging_level(loglevel),
6509
- pidfile=check_path_with_existing_dir(pidfile),
6510
- child_logdir=child_logdir if child_logdir else tempfile.gettempdir(),
6511
- **kwargs,
6512
- )
7039
+ @abc.abstractmethod
7040
+ def give_up(self) -> None:
7041
+ raise NotImplementedError
7042
+
7043
+ @abc.abstractmethod
7044
+ def transition(self) -> None:
7045
+ raise NotImplementedError
7046
+
7047
+ @property
7048
+ @abc.abstractmethod
7049
+ def state(self) -> ProcessState:
7050
+ raise NotImplementedError
7051
+
7052
+ @abc.abstractmethod
7053
+ def after_setuid(self) -> None:
7054
+ raise NotImplementedError
6513
7055
 
6514
7056
 
6515
7057
  ##
6516
7058
 
6517
7059
 
6518
- def prepare_process_group_config(dct: ConfigMapping) -> ConfigMapping:
6519
- out = dict(dct)
6520
- out['processes'] = build_config_named_children(out.get('processes'))
6521
- return out
7060
+ class ProcessGroup(
7061
+ ConfigPriorityOrdered,
7062
+ KeyedCollectionAccessors[str, Process],
7063
+ abc.ABC,
7064
+ ):
7065
+ @property
7066
+ @abc.abstractmethod
7067
+ def name(self) -> str:
7068
+ raise NotImplementedError
6522
7069
 
7070
+ @property
7071
+ @abc.abstractmethod
7072
+ def config(self) -> ProcessGroupConfig:
7073
+ raise NotImplementedError
6523
7074
 
6524
- def prepare_server_config(dct: ta.Mapping[str, ta.Any]) -> ta.Mapping[str, ta.Any]:
6525
- out = dict(dct)
6526
- group_dcts = build_config_named_children(out.get('groups'))
6527
- out['groups'] = [prepare_process_group_config(group_dct) for group_dct in group_dcts or []]
6528
- return out
7075
+ @property
7076
+ @abc.abstractmethod
7077
+ def by_name(self) -> ta.Mapping[str, Process]:
7078
+ raise NotImplementedError
6529
7079
 
7080
+ #
6530
7081
 
6531
- ##
7082
+ @abc.abstractmethod
7083
+ def stop_all(self) -> None:
7084
+ raise NotImplementedError
6532
7085
 
7086
+ @abc.abstractmethod
7087
+ def get_unstopped_processes(self) -> ta.List[Process]:
7088
+ raise NotImplementedError
6533
7089
 
6534
- def parse_logging_level(value: ta.Union[str, int]) -> int:
6535
- if isinstance(value, int):
6536
- return value
6537
- s = str(value).lower()
6538
- level = logging.getLevelNamesMapping().get(s.upper())
6539
- if level is None:
6540
- raise ValueError(f'bad logging level name {value!r}')
6541
- return level
7090
+ @abc.abstractmethod
7091
+ def before_remove(self) -> None:
7092
+ raise NotImplementedError
6542
7093
 
6543
7094
 
6544
7095
  ########################################
@@ -7047,368 +7598,59 @@ class CoroHttpServer:
7047
7598
  ##
7048
7599
 
7049
7600
 
7050
- class CoroHttpServerSocketHandler(SocketHandler):
7051
- def __init__(
7052
- self,
7053
- client_address: SocketAddress,
7054
- rfile: ta.BinaryIO,
7055
- wfile: ta.BinaryIO,
7056
- *,
7057
- server_factory: CoroHttpServerFactory,
7058
- log_handler: ta.Optional[ta.Callable[[CoroHttpServer, CoroHttpServer.AnyLogIo], None]] = None,
7059
- ) -> None:
7060
- super().__init__(
7061
- client_address,
7062
- rfile,
7063
- wfile,
7064
- )
7065
-
7066
- self._server_factory = server_factory
7067
- self._log_handler = log_handler
7068
-
7069
- def handle(self) -> None:
7070
- server = self._server_factory(self._client_address)
7071
-
7072
- gen = server.coro_handle()
7073
-
7074
- o = next(gen)
7075
- while True:
7076
- if isinstance(o, CoroHttpServer.AnyLogIo):
7077
- i = None
7078
- if self._log_handler is not None:
7079
- self._log_handler(server, o)
7080
-
7081
- elif isinstance(o, CoroHttpServer.ReadIo):
7082
- i = self._rfile.read(o.sz)
7083
-
7084
- elif isinstance(o, CoroHttpServer.ReadLineIo):
7085
- i = self._rfile.readline(o.sz)
7086
-
7087
- elif isinstance(o, CoroHttpServer.WriteIo):
7088
- i = None
7089
- self._wfile.write(o.data)
7090
- self._wfile.flush()
7091
-
7092
- else:
7093
- raise TypeError(o)
7094
-
7095
- try:
7096
- if i is not None:
7097
- o = gen.send(i)
7098
- else:
7099
- o = next(gen)
7100
- except StopIteration:
7101
- break
7102
-
7103
-
7104
- ########################################
7105
- # ../types.py
7106
-
7107
-
7108
- ##
7109
-
7110
-
7111
- class ExitNow(Exception): # noqa
7112
- pass
7113
-
7114
-
7115
- ServerEpoch = ta.NewType('ServerEpoch', int)
7116
-
7117
-
7118
- ##
7119
-
7120
-
7121
- @functools.total_ordering
7122
- class ConfigPriorityOrdered(abc.ABC):
7123
- @property
7124
- @abc.abstractmethod
7125
- def config(self) -> ta.Any:
7126
- raise NotImplementedError
7127
-
7128
- def __lt__(self, other):
7129
- return self.config.priority < other.config.priority
7130
-
7131
- def __eq__(self, other):
7132
- return self.config.priority == other.config.priority
7133
-
7134
-
7135
- ##
7136
-
7137
-
7138
- class SupervisorStateManager(abc.ABC):
7139
- @property
7140
- @abc.abstractmethod
7141
- def state(self) -> SupervisorState:
7142
- raise NotImplementedError
7143
-
7144
- @abc.abstractmethod
7145
- def set_state(self, state: SupervisorState) -> None:
7146
- raise NotImplementedError
7147
-
7148
-
7149
- ##
7150
-
7151
-
7152
- class HasDispatchers(abc.ABC):
7153
- @abc.abstractmethod
7154
- def get_dispatchers(self) -> 'Dispatchers':
7155
- raise NotImplementedError
7156
-
7157
-
7158
- class ProcessDispatcher(FdioHandler, abc.ABC):
7159
- @property
7160
- @abc.abstractmethod
7161
- def channel(self) -> ProcessOutputChannel:
7162
- raise NotImplementedError
7163
-
7164
- @property
7165
- @abc.abstractmethod
7166
- def process(self) -> 'Process':
7167
- raise NotImplementedError
7168
-
7169
-
7170
- class ProcessOutputDispatcher(ProcessDispatcher, abc.ABC):
7171
- @abc.abstractmethod
7172
- def remove_logs(self) -> None:
7173
- raise NotImplementedError
7174
-
7175
- @abc.abstractmethod
7176
- def reopen_logs(self) -> None:
7177
- raise NotImplementedError
7178
-
7179
-
7180
- class ProcessInputDispatcher(ProcessDispatcher, abc.ABC):
7181
- @abc.abstractmethod
7182
- def write(self, chars: ta.Union[bytes, str]) -> None:
7183
- raise NotImplementedError
7184
-
7185
- @abc.abstractmethod
7186
- def flush(self) -> None:
7187
- raise NotImplementedError
7188
-
7189
-
7190
- ##
7191
-
7192
-
7193
- class Process(
7194
- ConfigPriorityOrdered,
7195
- HasDispatchers,
7196
- abc.ABC,
7197
- ):
7198
- @property
7199
- @abc.abstractmethod
7200
- def name(self) -> str:
7201
- raise NotImplementedError
7202
-
7203
- @property
7204
- @abc.abstractmethod
7205
- def config(self) -> ProcessConfig:
7206
- raise NotImplementedError
7207
-
7208
- @property
7209
- @abc.abstractmethod
7210
- def group(self) -> 'ProcessGroup':
7211
- raise NotImplementedError
7212
-
7213
- @property
7214
- @abc.abstractmethod
7215
- def pid(self) -> Pid:
7216
- raise NotImplementedError
7217
-
7218
- #
7219
-
7220
- @abc.abstractmethod
7221
- def finish(self, sts: Rc) -> None:
7222
- raise NotImplementedError
7223
-
7224
- @abc.abstractmethod
7225
- def stop(self) -> ta.Optional[str]:
7226
- raise NotImplementedError
7227
-
7228
- @abc.abstractmethod
7229
- def give_up(self) -> None:
7230
- raise NotImplementedError
7231
-
7232
- @abc.abstractmethod
7233
- def transition(self) -> None:
7234
- raise NotImplementedError
7235
-
7236
- @property
7237
- @abc.abstractmethod
7238
- def state(self) -> ProcessState:
7239
- raise NotImplementedError
7240
-
7241
- @abc.abstractmethod
7242
- def after_setuid(self) -> None:
7243
- raise NotImplementedError
7244
-
7245
-
7246
- ##
7247
-
7248
-
7249
- class ProcessGroup(
7250
- ConfigPriorityOrdered,
7251
- KeyedCollectionAccessors[str, Process],
7252
- abc.ABC,
7253
- ):
7254
- @property
7255
- @abc.abstractmethod
7256
- def name(self) -> str:
7257
- raise NotImplementedError
7258
-
7259
- @property
7260
- @abc.abstractmethod
7261
- def config(self) -> ProcessGroupConfig:
7262
- raise NotImplementedError
7263
-
7264
- @property
7265
- @abc.abstractmethod
7266
- def by_name(self) -> ta.Mapping[str, Process]:
7267
- raise NotImplementedError
7268
-
7269
- #
7270
-
7271
- @abc.abstractmethod
7272
- def stop_all(self) -> None:
7273
- raise NotImplementedError
7274
-
7275
- @abc.abstractmethod
7276
- def get_unstopped_processes(self) -> ta.List[Process]:
7277
- raise NotImplementedError
7278
-
7279
- @abc.abstractmethod
7280
- def before_remove(self) -> None:
7281
- raise NotImplementedError
7282
-
7283
-
7284
- ########################################
7285
- # ../../../omlish/http/coro/fdio.py
7286
-
7287
-
7288
- class CoroHttpServerConnectionFdioHandler(SocketFdioHandler):
7601
+ class CoroHttpServerSocketHandler(SocketHandler):
7289
7602
  def __init__(
7290
7603
  self,
7291
- addr: SocketAddress,
7292
- sock: socket.socket,
7293
- handler: HttpHandler,
7604
+ client_address: SocketAddress,
7605
+ rfile: ta.BinaryIO,
7606
+ wfile: ta.BinaryIO,
7294
7607
  *,
7295
- read_size: int = 0x10000,
7296
- write_size: int = 0x10000,
7608
+ server_factory: CoroHttpServerFactory,
7297
7609
  log_handler: ta.Optional[ta.Callable[[CoroHttpServer, CoroHttpServer.AnyLogIo], None]] = None,
7298
7610
  ) -> None:
7299
- check.state(not sock.getblocking())
7300
-
7301
- super().__init__(addr, sock)
7302
-
7303
- self._handler = handler
7304
- self._read_size = read_size
7305
- self._write_size = write_size
7306
- self._log_handler = log_handler
7307
-
7308
- self._read_buf = ReadableListBuffer()
7309
- self._write_buf: IncrementalWriteBuffer | None = None
7310
-
7311
- self._coro_srv = CoroHttpServer(
7312
- addr,
7313
- handler=self._handler,
7611
+ super().__init__(
7612
+ client_address,
7613
+ rfile,
7614
+ wfile,
7314
7615
  )
7315
- self._srv_coro: ta.Optional[ta.Generator[CoroHttpServer.Io, ta.Optional[bytes], None]] = self._coro_srv.coro_handle() # noqa
7316
7616
 
7317
- self._cur_io: CoroHttpServer.Io | None = None
7318
- self._next_io()
7617
+ self._server_factory = server_factory
7618
+ self._log_handler = log_handler
7319
7619
 
7320
- #
7620
+ def handle(self) -> None:
7621
+ server = self._server_factory(self._client_address)
7321
7622
 
7322
- def _next_io(self) -> None: # noqa
7323
- coro = check.not_none(self._srv_coro)
7623
+ gen = server.coro_handle()
7324
7624
 
7325
- d: bytes | None = None
7326
- o = self._cur_io
7625
+ o = next(gen)
7327
7626
  while True:
7328
- if o is None:
7329
- try:
7330
- if d is not None:
7331
- o = coro.send(d)
7332
- d = None
7333
- else:
7334
- o = next(coro)
7335
- except StopIteration:
7336
- self.close()
7337
- o = None
7338
- break
7339
-
7340
7627
  if isinstance(o, CoroHttpServer.AnyLogIo):
7628
+ i = None
7341
7629
  if self._log_handler is not None:
7342
- self._log_handler(self._coro_srv, o)
7343
- o = None
7630
+ self._log_handler(server, o)
7344
7631
 
7345
7632
  elif isinstance(o, CoroHttpServer.ReadIo):
7346
- if (d := self._read_buf.read(o.sz)) is None:
7347
- break
7348
- o = None
7633
+ i = self._rfile.read(o.sz)
7349
7634
 
7350
7635
  elif isinstance(o, CoroHttpServer.ReadLineIo):
7351
- if (d := self._read_buf.read_until(b'\n')) is None:
7352
- break
7353
- o = None
7636
+ i = self._rfile.readline(o.sz)
7354
7637
 
7355
7638
  elif isinstance(o, CoroHttpServer.WriteIo):
7356
- check.none(self._write_buf)
7357
- self._write_buf = IncrementalWriteBuffer(o.data, write_size=self._write_size)
7358
- break
7639
+ i = None
7640
+ self._wfile.write(o.data)
7641
+ self._wfile.flush()
7359
7642
 
7360
7643
  else:
7361
7644
  raise TypeError(o)
7362
7645
 
7363
- self._cur_io = o
7364
-
7365
- #
7366
-
7367
- def readable(self) -> bool:
7368
- return True
7369
-
7370
- def writable(self) -> bool:
7371
- return self._write_buf is not None
7372
-
7373
- #
7374
-
7375
- def on_readable(self) -> None:
7376
- try:
7377
- buf = check.not_none(self._sock).recv(self._read_size)
7378
- except BlockingIOError:
7379
- return
7380
- except ConnectionResetError:
7381
- self.close()
7382
- return
7383
- if not buf:
7384
- self.close()
7385
- return
7386
-
7387
- self._read_buf.feed(buf)
7388
-
7389
- if isinstance(self._cur_io, CoroHttpServer.AnyReadIo):
7390
- self._next_io()
7391
-
7392
- def on_writable(self) -> None:
7393
- check.isinstance(self._cur_io, CoroHttpServer.WriteIo)
7394
- wb = check.not_none(self._write_buf)
7395
- while wb.rem > 0:
7396
- def send(d: bytes) -> int:
7397
- try:
7398
- return check.not_none(self._sock).send(d)
7399
- except ConnectionResetError:
7400
- self.close()
7401
- return 0
7402
- except BlockingIOError:
7403
- return 0
7404
- if not wb.write(send):
7646
+ try:
7647
+ if i is not None:
7648
+ o = gen.send(i)
7649
+ else:
7650
+ o = next(gen)
7651
+ except StopIteration:
7405
7652
  break
7406
7653
 
7407
- if wb.rem < 1:
7408
- self._write_buf = None
7409
- self._cur_io = None
7410
- self._next_io()
7411
-
7412
7654
 
7413
7655
  ########################################
7414
7656
  # ../dispatchers.py
@@ -8110,6 +8352,135 @@ class SupervisorSetupImpl(SupervisorSetup):
8110
8352
  os.umask(self._config.umask)
8111
8353
 
8112
8354
 
8355
+ ########################################
8356
+ # ../../../omlish/http/coro/fdio.py
8357
+
8358
+
8359
+ class CoroHttpServerConnectionFdioHandler(SocketFdioHandler):
8360
+ def __init__(
8361
+ self,
8362
+ addr: SocketAddress,
8363
+ sock: socket.socket,
8364
+ handler: HttpHandler,
8365
+ *,
8366
+ read_size: int = 0x10000,
8367
+ write_size: int = 0x10000,
8368
+ log_handler: ta.Optional[ta.Callable[[CoroHttpServer, CoroHttpServer.AnyLogIo], None]] = None,
8369
+ ) -> None:
8370
+ check.state(not sock.getblocking())
8371
+
8372
+ super().__init__(addr, sock)
8373
+
8374
+ self._handler = handler
8375
+ self._read_size = read_size
8376
+ self._write_size = write_size
8377
+ self._log_handler = log_handler
8378
+
8379
+ self._read_buf = ReadableListBuffer()
8380
+ self._write_buf: IncrementalWriteBuffer | None = None
8381
+
8382
+ self._coro_srv = CoroHttpServer(
8383
+ addr,
8384
+ handler=self._handler,
8385
+ )
8386
+ self._srv_coro: ta.Optional[ta.Generator[CoroHttpServer.Io, ta.Optional[bytes], None]] = self._coro_srv.coro_handle() # noqa
8387
+
8388
+ self._cur_io: CoroHttpServer.Io | None = None
8389
+ self._next_io()
8390
+
8391
+ #
8392
+
8393
+ def _next_io(self) -> None: # noqa
8394
+ coro = check.not_none(self._srv_coro)
8395
+
8396
+ d: bytes | None = None
8397
+ o = self._cur_io
8398
+ while True:
8399
+ if o is None:
8400
+ try:
8401
+ if d is not None:
8402
+ o = coro.send(d)
8403
+ d = None
8404
+ else:
8405
+ o = next(coro)
8406
+ except StopIteration:
8407
+ self.close()
8408
+ o = None
8409
+ break
8410
+
8411
+ if isinstance(o, CoroHttpServer.AnyLogIo):
8412
+ if self._log_handler is not None:
8413
+ self._log_handler(self._coro_srv, o)
8414
+ o = None
8415
+
8416
+ elif isinstance(o, CoroHttpServer.ReadIo):
8417
+ if (d := self._read_buf.read(o.sz)) is None:
8418
+ break
8419
+ o = None
8420
+
8421
+ elif isinstance(o, CoroHttpServer.ReadLineIo):
8422
+ if (d := self._read_buf.read_until(b'\n')) is None:
8423
+ break
8424
+ o = None
8425
+
8426
+ elif isinstance(o, CoroHttpServer.WriteIo):
8427
+ check.none(self._write_buf)
8428
+ self._write_buf = IncrementalWriteBuffer(o.data, write_size=self._write_size)
8429
+ break
8430
+
8431
+ else:
8432
+ raise TypeError(o)
8433
+
8434
+ self._cur_io = o
8435
+
8436
+ #
8437
+
8438
+ def readable(self) -> bool:
8439
+ return True
8440
+
8441
+ def writable(self) -> bool:
8442
+ return self._write_buf is not None
8443
+
8444
+ #
8445
+
8446
+ def on_readable(self) -> None:
8447
+ try:
8448
+ buf = check.not_none(self._sock).recv(self._read_size)
8449
+ except BlockingIOError:
8450
+ return
8451
+ except ConnectionResetError:
8452
+ self.close()
8453
+ return
8454
+ if not buf:
8455
+ self.close()
8456
+ return
8457
+
8458
+ self._read_buf.feed(buf)
8459
+
8460
+ if isinstance(self._cur_io, CoroHttpServer.AnyReadIo):
8461
+ self._next_io()
8462
+
8463
+ def on_writable(self) -> None:
8464
+ check.isinstance(self._cur_io, CoroHttpServer.WriteIo)
8465
+ wb = check.not_none(self._write_buf)
8466
+ while wb.rem > 0:
8467
+ def send(d: bytes) -> int:
8468
+ try:
8469
+ return check.not_none(self._sock).send(d)
8470
+ except ConnectionResetError:
8471
+ self.close()
8472
+ return 0
8473
+ except BlockingIOError:
8474
+ return 0
8475
+ if not wb.write(send):
8476
+ break
8477
+
8478
+ if wb.rem < 1:
8479
+ self._write_buf = None
8480
+ self._cur_io = None
8481
+ self._next_io()
8482
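Rough wiring sketch (editor's assumption about how this handler is driven; 'listener' and 'my_http_handler' are placeholders): the socket must already be non-blocking, and an fd-polling loop then drives on_readable()/on_writable():

def my_http_handler(req):              # placeholder HttpHandler
    raise NotImplementedError

conn, addr = listener.accept()         # 'listener': an existing listening socket (assumption)
conn.setblocking(False)                # required: __init__ checks the socket is non-blocking
h = CoroHttpServerConnectionFdioHandler(addr, conn, my_http_handler)
# A poller then invokes h.on_readable() / h.on_writable() as the socket becomes
# ready; the handler closes itself once the wrapped coroutine finishes.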
+
8483
+
8113
8484
  ########################################
8114
8485
  # ../groups.py
8115
8486
 
@@ -9685,7 +10056,7 @@ def main(
9685
10056
 
9686
10057
  # if we hup, restart by making a new Supervisor()
9687
10058
  for epoch in itertools.count():
9688
- config = read_config_file(
10059
+ config = load_config_file_obj(
9689
10060
  os.path.expanduser(cf),
9690
10061
  ServerConfig,
9691
10062
  prepare=prepare_server_config,