ominfra 0.0.0.dev119__py3-none-any.whl → 0.0.0.dev121__py3-none-any.whl

Sign up to get free protection for your applications and to get access to all the features.
@@ -18,10 +18,12 @@ import fcntl
18
18
  import fractions
19
19
  import functools
20
20
  import grp
21
+ import inspect
21
22
  import itertools
22
23
  import json
23
24
  import logging
24
25
  import os
26
+ import os.path
25
27
  import pwd
26
28
  import re
27
29
  import resource
@@ -29,6 +31,7 @@ import select
29
31
  import shlex
30
32
  import signal
31
33
  import stat
34
+ import string
32
35
  import sys
33
36
  import syslog
34
37
  import tempfile
@@ -53,6 +56,11 @@ if sys.version_info < (3, 8):
53
56
  ########################################
54
57
 
55
58
 
59
# ../../../omdev/toml/parser.py
TomlParseFloat = ta.Callable[[str], ta.Any]  # callback used to parse float literals, e.g. `float` or `decimal.Decimal`
TomlKey = ta.Tuple[str, ...]  # a (possibly dotted) TOML key, split into its parts
TomlPos = int  # ta.TypeAlias  # character offset into the source string
63
+
56
64
  # ../compat.py
57
65
  T = ta.TypeVar('T')
58
66
 
@@ -60,6 +68,837 @@ T = ta.TypeVar('T')
60
68
  ProcessState = int # ta.TypeAlias
61
69
  SupervisorState = int # ta.TypeAlias
62
70
 
71
# ../../../omlish/lite/inject.py
InjectorKeyCls = ta.Union[type, ta.NewType]  # anything usable as the class of an injector key
InjectorProviderFn = ta.Callable[['Injector'], ta.Any]  # produces a value given the active Injector
InjectorProviderFnMap = ta.Mapping['InjectorKey', 'InjectorProviderFn']
InjectorBindingOrBindings = ta.Union['InjectorBinding', 'InjectorBindings']

# ../../configs.py
ConfigMapping = ta.Mapping[str, ta.Any]

# ../context.py
ServerEpoch = ta.NewType('ServerEpoch', int)
InheritedFds = ta.NewType('InheritedFds', ta.FrozenSet[int])
83
+
84
+
85
+ ########################################
86
+ # ../../../omdev/toml/parser.py
87
+ # SPDX-License-Identifier: MIT
88
+ # SPDX-FileCopyrightText: 2021 Taneli Hukkinen
89
+ # Licensed to PSF under a Contributor Agreement.
90
+ #
91
+ # PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
92
+ # --------------------------------------------
93
+ #
94
+ # 1. This LICENSE AGREEMENT is between the Python Software Foundation ("PSF"), and the Individual or Organization
95
+ # ("Licensee") accessing and otherwise using this software ("Python") in source or binary form and its associated
96
+ # documentation.
97
+ #
98
+ # 2. Subject to the terms and conditions of this License Agreement, PSF hereby grants Licensee a nonexclusive,
99
+ # royalty-free, world-wide license to reproduce, analyze, test, perform and/or display publicly, prepare derivative
100
+ # works, distribute, and otherwise use Python alone or in any derivative version, provided, however, that PSF's License
101
+ # Agreement and PSF's notice of copyright, i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
102
+ # 2010, 2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023 Python Software Foundation; All
103
+ # Rights Reserved" are retained in Python alone or in any derivative version prepared by Licensee.
104
+ #
105
+ # 3. In the event Licensee prepares a derivative work that is based on or incorporates Python or any part thereof, and
106
+ # wants to make the derivative work available to others as provided herein, then Licensee hereby agrees to include in
107
+ # any such work a brief summary of the changes made to Python.
108
+ #
109
+ # 4. PSF is making Python available to Licensee on an "AS IS" basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES,
110
+ # EXPRESS OR IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND DISCLAIMS ANY REPRESENTATION OR WARRANTY
111
+ # OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT INFRINGE ANY THIRD PARTY
112
+ # RIGHTS.
113
+ #
114
+ # 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL
115
+ # DAMAGES OR LOSS AS A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, OR ANY DERIVATIVE THEREOF, EVEN IF
116
+ # ADVISED OF THE POSSIBILITY THEREOF.
117
+ #
118
+ # 6. This License Agreement will automatically terminate upon a material breach of its terms and conditions.
119
+ #
120
+ # 7. Nothing in this License Agreement shall be deemed to create any relationship of agency, partnership, or joint
121
+ # venture between PSF and Licensee. This License Agreement does not grant permission to use PSF trademarks or trade
122
+ # name in a trademark sense to endorse or promote products or services of Licensee, or any third party.
123
+ #
124
+ # 8. By copying, installing or otherwise using Python, Licensee agrees to be bound by the terms and conditions of this
125
+ # License Agreement.
126
+ #
127
+ # https://github.com/python/cpython/blob/9ce90206b7a4649600218cf0bd4826db79c9a312/Lib/tomllib/_parser.py
128
+
129
+
130
+ ##
131
+
132
+
133
# HH:MM:SS with an optional fractional-seconds part (captured to at most 6 digits; extra digits are matched but
# discarded).
_TOML_TIME_RE_STR = r'([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9])(?:\.([0-9]{1,6})[0-9]*)?'

# Matches any TOML integer or float literal; the 'floatpart' group is non-empty exactly when the literal is a float.
TOML_RE_NUMBER = re.compile(
    r"""
0
(?:
    x[0-9A-Fa-f](?:_?[0-9A-Fa-f])*   # hex
    |
    b[01](?:_?[01])*                 # bin
    |
    o[0-7](?:_?[0-7])*               # oct
)
|
[+-]?(?:0|[1-9](?:_?[0-9])*)         # dec, integer part
(?P<floatpart>
    (?:\.[0-9](?:_?[0-9])*)?         # optional fractional part
    (?:[eE][+-]?[0-9](?:_?[0-9])*)?  # optional exponent part
)
""",
    flags=re.VERBOSE,
)
TOML_RE_LOCALTIME = re.compile(_TOML_TIME_RE_STR)
TOML_RE_DATETIME = re.compile(
    rf"""
([0-9]{{4}})-(0[1-9]|1[0-2])-(0[1-9]|[12][0-9]|3[01])  # date, e.g. 1988-10-27
(?:
    [Tt ]
    {_TOML_TIME_RE_STR}
    (?:([Zz])|([+-])([01][0-9]|2[0-3]):([0-5][0-9]))?  # optional time offset
)?
""",
    flags=re.VERBOSE,
)
166
+
167
+
168
def toml_match_to_datetime(match: re.Match) -> ta.Union[datetime.datetime, datetime.date]:
    """Convert a `RE_DATETIME` match to `datetime.datetime` or `datetime.date`.

    Returns a `datetime.date` when no time component matched, otherwise a `datetime.datetime` (timezone-aware when a
    numeric offset or zulu suffix was present, naive for a local date-time).

    Raises ValueError if the match does not correspond to a valid date or datetime.
    """
    (
        year_str,
        month_str,
        day_str,
        hour_str,
        minute_str,
        sec_str,
        micros_str,
        zulu_time,
        offset_sign_str,
        offset_hour_str,
        offset_minute_str,
    ) = match.groups()
    year, month, day = int(year_str), int(month_str), int(day_str)
    if hour_str is None:
        return datetime.date(year, month, day)
    hour, minute, sec = int(hour_str), int(minute_str), int(sec_str)
    # Fractional seconds are right-padded to microseconds ('.5' -> 500000).
    micros = int(micros_str.ljust(6, '0')) if micros_str else 0
    if offset_sign_str:
        tz: ta.Optional[datetime.tzinfo] = toml_cached_tz(
            offset_hour_str, offset_minute_str, offset_sign_str,
        )
    elif zulu_time:
        # Use datetime.timezone.utc, not datetime.UTC: the latter only exists on Python 3.11+, and this file
        # supports 3.8+ (see the version check at the top of the file).
        tz = datetime.timezone.utc
    else:  # local date-time
        tz = None
    return datetime.datetime(year, month, day, hour, minute, sec, micros, tzinfo=tz)
200
+
201
+
202
@functools.lru_cache()  # noqa
def toml_cached_tz(hour_str: str, minute_str: str, sign_str: str) -> datetime.timezone:
    """Build (and memoize) a fixed-offset timezone from matched offset strings."""
    factor = 1 if sign_str == '+' else -1
    delta = datetime.timedelta(
        hours=factor * int(hour_str),
        minutes=factor * int(minute_str),
    )
    return datetime.timezone(delta)
211
+
212
+
213
def toml_match_to_localtime(match: re.Match) -> datetime.time:
    """Convert a `RE_LOCALTIME` match to a naive `datetime.time`."""
    hour_str, minute_str, sec_str, micros_str = match.groups()
    if micros_str:
        micros = int(micros_str.ljust(6, '0'))  # right-pad fractional seconds to microseconds
    else:
        micros = 0
    return datetime.time(int(hour_str), int(minute_str), int(sec_str), micros)
217
+
218
+
219
def toml_match_to_number(match: re.Match, parse_float: TomlParseFloat) -> ta.Any:
    """Convert a `RE_NUMBER` match to an int (base auto-detected) or a parsed float."""
    text = match.group()
    if match.group('floatpart'):
        return parse_float(text)
    return int(text, 0)  # base 0 handles 0x/0o/0b prefixes
223
+
224
+
225
# All ASCII control characters (0x00-0x1F plus DEL).
TOML_ASCII_CTRL = frozenset(chr(i) for i in range(32)) | frozenset(chr(127))

# Neither of these sets include quotation mark or backslash. They are currently handled as separate cases in the parser
# functions.
TOML_ILLEGAL_BASIC_STR_CHARS = TOML_ASCII_CTRL - frozenset('\t')
TOML_ILLEGAL_MULTILINE_BASIC_STR_CHARS = TOML_ASCII_CTRL - frozenset('\t\n')

TOML_ILLEGAL_LITERAL_STR_CHARS = TOML_ILLEGAL_BASIC_STR_CHARS
TOML_ILLEGAL_MULTILINE_LITERAL_STR_CHARS = TOML_ILLEGAL_MULTILINE_BASIC_STR_CHARS

TOML_ILLEGAL_COMMENT_CHARS = TOML_ILLEGAL_BASIC_STR_CHARS

TOML_WS = frozenset(' \t')
TOML_WS_AND_NEWLINE = TOML_WS | frozenset('\n')
TOML_BARE_KEY_CHARS = frozenset(string.ascii_letters + string.digits + '-_')
TOML_KEY_INITIAL_CHARS = TOML_BARE_KEY_CHARS | frozenset("\"'")
TOML_HEXDIGIT_CHARS = frozenset(string.hexdigits)

# Single-character escape sequences and their replacements (\u and \U escapes are handled separately).
TOML_BASIC_STR_ESCAPE_REPLACEMENTS = types.MappingProxyType(
    {
        '\\b': '\u0008',  # backspace
        '\\t': '\u0009',  # tab
        '\\n': '\u000A',  # linefeed
        '\\f': '\u000C',  # form feed
        '\\r': '\u000D',  # carriage return
        '\\"': '\u0022',  # quote
        '\\\\': '\u005C',  # backslash
    },
)
254
+
255
+
256
class TomlDecodeError(ValueError):
    """An error raised if a document is not valid TOML.

    Messages are suffixed with line/column coordinates by `toml_suffixed_err`.
    """
258
+
259
+
260
def toml_load(fp: ta.BinaryIO, /, *, parse_float: TomlParseFloat = float) -> ta.Dict[str, ta.Any]:
    """Parse TOML from a binary file object."""
    raw = fp.read()
    try:
        text = raw.decode()
    except AttributeError:
        # A text-mode file yields str, which has no .decode().
        raise TypeError("File must be opened in binary mode, e.g. use `open('foo.toml', 'rb')`") from None
    return toml_loads(text, parse_float=parse_float)
268
+
269
+
270
def toml_loads(s: str, /, *, parse_float: TomlParseFloat = float) -> ta.Dict[str, ta.Any]:  # noqa: C901
    """Parse TOML from a string.

    Returns the document as a plain dict. Raises `TypeError` if `s` is not a str; invalid TOML raises via the rule
    helpers (which build `TomlDecodeError`s).
    """

    # The spec allows converting "\r\n" to "\n", even in string literals. Let's do so to simplify parsing.
    try:
        src = s.replace('\r\n', '\n')
    except (AttributeError, TypeError):
        raise TypeError(f"Expected str object, not '{type(s).__qualname__}'") from None
    pos = 0
    out = TomlOutput(TomlNestedDict(), TomlFlags())
    header: TomlKey = ()  # key of the table section currently being filled; () is the document root
    parse_float = toml_make_safe_parse_float(parse_float)

    # Parse one statement at a time (typically means one line in TOML source)
    while True:
        # 1. Skip line leading whitespace
        pos = toml_skip_chars(src, pos, TOML_WS)

        # 2. Parse rules. Expect one of the following:
        #    - end of file
        #    - end of line
        #    - comment
        #    - key/value pair
        #    - append dict to list (and move to its namespace)
        #    - create dict (and move to its namespace)
        # Skip trailing whitespace when applicable.
        try:
            char = src[pos]
        except IndexError:
            break
        if char == '\n':
            pos += 1
            continue
        if char in TOML_KEY_INITIAL_CHARS:
            pos = toml_key_value_rule(src, pos, out, header, parse_float)
            pos = toml_skip_chars(src, pos, TOML_WS)
        elif char == '[':
            try:
                second_char: ta.Optional[str] = src[pos + 1]
            except IndexError:
                second_char = None
            out.flags.finalize_pending()
            if second_char == '[':
                pos, header = toml_create_list_rule(src, pos, out)
            else:
                pos, header = toml_create_dict_rule(src, pos, out)
            pos = toml_skip_chars(src, pos, TOML_WS)
        elif char != '#':
            raise toml_suffixed_err(src, pos, 'Invalid statement')

        # 3. Skip comment
        pos = toml_skip_comment(src, pos)

        # 4. Expect end of line or end of file
        try:
            char = src[pos]
        except IndexError:
            break
        if char != '\n':
            raise toml_suffixed_err(
                src, pos, 'Expected newline or end of document after a statement',
            )
        pos += 1

    return out.data.dict
335
+
336
+
337
class TomlFlags:
    """Flags that map to parsed keys/namespaces."""

    # Marks an immutable namespace (inline array or inline table).
    FROZEN = 0
    # Marks a nest that has been explicitly created and can no longer be opened using the "[table]" syntax.
    EXPLICIT_NEST = 1

    def __init__(self) -> None:
        # Trie of flag nodes: name -> {'flags', 'recursive_flags', 'nested'}.
        self._flags: ta.Dict[str, dict] = {}
        # Flags queued by `add_pending`, applied on `finalize_pending`.
        self._pending_flags: ta.Set[ta.Tuple[TomlKey, int]] = set()

    def add_pending(self, key: TomlKey, flag: int) -> None:
        # Queue a flag to be applied when the current table section is finalized.
        self._pending_flags.add((key, flag))

    def finalize_pending(self) -> None:
        # Apply (non-recursively) and clear all queued flags.
        for key, flag in self._pending_flags:
            self.set(key, flag, recursive=False)
        self._pending_flags.clear()

    def unset_all(self, key: TomlKey) -> None:
        # Drop the flag node for `key` entirely; no-op if any part of the path is absent.
        cont = self._flags
        for k in key[:-1]:
            if k not in cont:
                return
            cont = cont[k]['nested']
        cont.pop(key[-1], None)

    def set(self, key: TomlKey, flag: int, *, recursive: bool) -> None:  # noqa: A003
        # Set `flag` on `key`, creating intermediate nodes as needed; a recursive flag also covers descendants.
        cont = self._flags
        key_parent, key_stem = key[:-1], key[-1]
        for k in key_parent:
            if k not in cont:
                cont[k] = {'flags': set(), 'recursive_flags': set(), 'nested': {}}
            cont = cont[k]['nested']
        if key_stem not in cont:
            cont[key_stem] = {'flags': set(), 'recursive_flags': set(), 'nested': {}}
        cont[key_stem]['recursive_flags' if recursive else 'flags'].add(flag)

    def is_(self, key: TomlKey, flag: int) -> bool:
        # True if `flag` applies to `key`, either directly or via a recursive flag on an ancestor.
        if not key:
            return False  # document root has no flags
        cont = self._flags
        for k in key[:-1]:
            if k not in cont:
                return False
            inner_cont = cont[k]
            if flag in inner_cont['recursive_flags']:
                return True
            cont = inner_cont['nested']
        key_stem = key[-1]
        if key_stem in cont:
            cont = cont[key_stem]
            return flag in cont['flags'] or flag in cont['recursive_flags']
        return False
392
+
393
+
394
class TomlNestedDict:
    """Builds the parsed document, creating nested tables on demand."""

    def __init__(self) -> None:
        # The parsed content of the TOML document
        self.dict: ta.Dict[str, ta.Any] = {}

    def get_or_create_nest(
            self,
            key: TomlKey,
            *,
            access_lists: bool = True,
    ) -> dict:
        """Return the dict at `key`, creating empty tables along the way.

        When `access_lists` is true, a list on the path is traversed through its last element (the array-of-tables
        case). Raises KeyError if a non-dict value sits where a table is needed.
        """
        node: ta.Any = self.dict
        for part in key:
            node = node.setdefault(part, {})
            if access_lists and isinstance(node, list):
                node = node[-1]
            if not isinstance(node, dict):
                raise KeyError('There is no nest behind this key')
        return node

    def append_nest_to_list(self, key: TomlKey) -> None:
        """Append a fresh table to the list at `key`, creating the list if absent."""
        parent = self.get_or_create_nest(key[:-1])
        stem = key[-1]
        if stem not in parent:
            parent[stem] = [{}]
            return
        existing = parent[stem]
        if not isinstance(existing, list):
            raise KeyError('An object other than list found behind this key')
        existing.append({})
426
+
427
+
428
class TomlOutput(ta.NamedTuple):
    # Paired parser state: the document being built plus its key/namespace flags.
    data: TomlNestedDict
    flags: TomlFlags
431
+
432
+
433
def toml_skip_chars(src: str, pos: TomlPos, chars: ta.Iterable[str]) -> TomlPos:
    """Advance `pos` past any run of characters contained in `chars`."""
    end = len(src)
    while pos < end and src[pos] in chars:
        pos += 1
    return pos
440
+
441
+
442
def toml_skip_until(
        src: str,
        pos: TomlPos,
        expect: str,
        *,
        error_on: ta.FrozenSet[str],
        error_on_eof: bool,
) -> TomlPos:
    """Return the index of the next occurrence of `expect` at or after `pos`.

    When `expect` is absent, returns len(src), or raises if `error_on_eof` is set. Raises if any character from
    `error_on` occurs before the target.
    """
    found = src.find(expect, pos)
    if found == -1:
        found = len(src)
        if error_on_eof:
            raise toml_suffixed_err(src, found, f'Expected {expect!r}') from None
    if not error_on.isdisjoint(src[pos:found]):
        # Walk forward so the error reports the exact offending character.
        while src[pos] not in error_on:
            pos += 1
        raise toml_suffixed_err(src, pos, f'Found invalid character {src[pos]!r}')
    return found
462
+
463
+
464
def toml_skip_comment(src: str, pos: TomlPos) -> TomlPos:
    """If a comment starts at `pos`, skip to the end of the line; otherwise return `pos` unchanged."""
    # Slicing (instead of indexing) tolerates pos at or past end-of-string.
    if src[pos:pos + 1] != '#':
        return pos
    return toml_skip_until(
        src, pos + 1, '\n', error_on=TOML_ILLEGAL_COMMENT_CHARS, error_on_eof=False,
    )
474
+
475
+
476
def toml_skip_comments_and_array_ws(src: str, pos: TomlPos) -> TomlPos:
    """Skip any mix of whitespace, newlines and comments (as permitted inside arrays)."""
    while True:
        start = pos
        pos = toml_skip_chars(src, pos, TOML_WS_AND_NEWLINE)
        pos = toml_skip_comment(src, pos)
        if pos == start:
            # Nothing consumed on this pass: done.
            return pos
483
+
484
+
485
def toml_create_dict_rule(src: str, pos: TomlPos, out: TomlOutput) -> ta.Tuple[TomlPos, TomlKey]:
    """Handle a `[table]` header; returns the position after `]` and the new header key."""
    pos += 1  # Skip "["
    pos = toml_skip_chars(src, pos, TOML_WS)
    pos, key = toml_parse_key(src, pos)

    # A table may be declared only once, and not inside a frozen (inline) namespace.
    if out.flags.is_(key, TomlFlags.EXPLICIT_NEST) or out.flags.is_(key, TomlFlags.FROZEN):
        raise toml_suffixed_err(src, pos, f'Cannot declare {key} twice')
    out.flags.set(key, TomlFlags.EXPLICIT_NEST, recursive=False)
    try:
        out.data.get_or_create_nest(key)
    except KeyError:
        raise toml_suffixed_err(src, pos, 'Cannot overwrite a value') from None

    if not src.startswith(']', pos):
        raise toml_suffixed_err(src, pos, "Expected ']' at the end of a table declaration")
    return pos + 1, key
501
+
502
+
503
def toml_create_list_rule(src: str, pos: TomlPos, out: TomlOutput) -> ta.Tuple[TomlPos, TomlKey]:
    """Handle an `[[array-of-tables]]` header; returns the position after `]]` and the new header key."""
    pos += 2  # Skip "[["
    pos = toml_skip_chars(src, pos, TOML_WS)
    pos, key = toml_parse_key(src, pos)

    if out.flags.is_(key, TomlFlags.FROZEN):
        raise toml_suffixed_err(src, pos, f'Cannot mutate immutable namespace {key}')
    # Free the namespace now that it points to another empty list item...
    out.flags.unset_all(key)
    # ...but this key precisely is still prohibited from table declaration
    out.flags.set(key, TomlFlags.EXPLICIT_NEST, recursive=False)
    try:
        out.data.append_nest_to_list(key)
    except KeyError:
        raise toml_suffixed_err(src, pos, 'Cannot overwrite a value') from None

    if not src.startswith(']]', pos):
        raise toml_suffixed_err(src, pos, "Expected ']]' at the end of an array declaration")
    return pos + 2, key
522
+
523
+
524
def toml_key_value_rule(
        src: str,
        pos: TomlPos,
        out: TomlOutput,
        header: TomlKey,
        parse_float: TomlParseFloat,
) -> TomlPos:
    """Handle one `key = value` statement under the current table `header`; returns the position after the value."""
    pos, key, value = toml_parse_key_value_pair(src, pos, parse_float)
    key_parent, key_stem = key[:-1], key[-1]
    abs_key_parent = header + key_parent

    relative_path_cont_keys = (header + key[:i] for i in range(1, len(key)))
    for cont_key in relative_path_cont_keys:
        # Check that dotted key syntax does not redefine an existing table
        if out.flags.is_(cont_key, TomlFlags.EXPLICIT_NEST):
            raise toml_suffixed_err(src, pos, f'Cannot redefine namespace {cont_key}')
        # Containers in the relative path can't be opened with the table syntax or dotted key/value syntax in following
        # table sections.
        out.flags.add_pending(cont_key, TomlFlags.EXPLICIT_NEST)

    if out.flags.is_(abs_key_parent, TomlFlags.FROZEN):
        raise toml_suffixed_err(
            src,
            pos,
            f'Cannot mutate immutable namespace {abs_key_parent}',
        )

    try:
        nest = out.data.get_or_create_nest(abs_key_parent)
    except KeyError:
        raise toml_suffixed_err(src, pos, 'Cannot overwrite a value') from None
    if key_stem in nest:
        raise toml_suffixed_err(src, pos, 'Cannot overwrite a value')
    # Mark inline table and array namespaces recursively immutable
    if isinstance(value, (dict, list)):
        out.flags.set(header + key, TomlFlags.FROZEN, recursive=True)
    nest[key_stem] = value
    return pos
562
+
563
+
564
def toml_parse_key_value_pair(
        src: str,
        pos: TomlPos,
        parse_float: TomlParseFloat,
) -> ta.Tuple[TomlPos, TomlKey, ta.Any]:
    """Parse `key = value`; returns (position after value, key parts, parsed value)."""
    pos, key = toml_parse_key(src, pos)
    try:
        char: ta.Optional[str] = src[pos]
    except IndexError:
        char = None  # end of document
    if char != '=':
        raise toml_suffixed_err(src, pos, "Expected '=' after a key in a key/value pair")
    pos += 1
    pos = toml_skip_chars(src, pos, TOML_WS)
    pos, value = toml_parse_value(src, pos, parse_float)
    return pos, key, value
580
+
581
+
582
def toml_parse_key(src: str, pos: TomlPos) -> ta.Tuple[TomlPos, TomlKey]:
    """Parse a (possibly dotted) key; returns the position after the key and its parts as a tuple."""
    pos, key_part = toml_parse_key_part(src, pos)
    key: TomlKey = (key_part,)
    pos = toml_skip_chars(src, pos, TOML_WS)
    while True:
        try:
            char: ta.Optional[str] = src[pos]
        except IndexError:
            char = None  # end of document
        if char != '.':
            return pos, key
        # Consume the '.' separator and the next part (whitespace around dots is allowed).
        pos += 1
        pos = toml_skip_chars(src, pos, TOML_WS)
        pos, key_part = toml_parse_key_part(src, pos)
        key += (key_part,)
        pos = toml_skip_chars(src, pos, TOML_WS)
598
+
599
+
600
def toml_parse_key_part(src: str, pos: TomlPos) -> ta.Tuple[TomlPos, str]:
    """Parse a single key part: bare, literal ('...') quoted, or basic ("...") quoted."""
    try:
        char: ta.Optional[str] = src[pos]
    except IndexError:
        char = None  # end of document
    if char in TOML_BARE_KEY_CHARS:
        start_pos = pos
        pos = toml_skip_chars(src, pos, TOML_BARE_KEY_CHARS)
        return pos, src[start_pos:pos]
    if char == "'":
        return toml_parse_literal_str(src, pos)
    if char == '"':
        return toml_parse_one_line_basic_str(src, pos)
    raise toml_suffixed_err(src, pos, 'Invalid initial character for a key part')
614
+
615
+
616
def toml_parse_one_line_basic_str(src: str, pos: TomlPos) -> ta.Tuple[TomlPos, str]:
    """Parse a single-line basic ("...") string; `pos` is at the opening quote."""
    return toml_parse_basic_str(src, pos + 1, multiline=False)
619
+
620
+
621
def toml_parse_array(src: str, pos: TomlPos, parse_float: TomlParseFloat) -> ta.Tuple[TomlPos, list]:
    """Parse an array literal starting at `[`; returns the position after `]` and the list."""
    pos += 1
    array: list = []

    pos = toml_skip_comments_and_array_ws(src, pos)
    if src.startswith(']', pos):
        return pos + 1, array
    while True:
        pos, val = toml_parse_value(src, pos, parse_float)
        array.append(val)
        pos = toml_skip_comments_and_array_ws(src, pos)

        c = src[pos:pos + 1]
        if c == ']':
            return pos + 1, array
        if c != ',':
            raise toml_suffixed_err(src, pos, 'Unclosed array')
        pos += 1

        # A trailing comma before the closing bracket is permitted.
        pos = toml_skip_comments_and_array_ws(src, pos)
        if src.startswith(']', pos):
            return pos + 1, array
643
+
644
+
645
def toml_parse_inline_table(src: str, pos: TomlPos, parse_float: TomlParseFloat) -> ta.Tuple[TomlPos, dict]:
    """Parse an inline table starting at `{`; returns the position after `}` and the dict."""
    pos += 1
    # Inline tables get their own nested-dict/flags scope, separate from the document's.
    nested_dict = TomlNestedDict()
    flags = TomlFlags()

    pos = toml_skip_chars(src, pos, TOML_WS)
    if src.startswith('}', pos):
        return pos + 1, nested_dict.dict
    while True:
        pos, key, value = toml_parse_key_value_pair(src, pos, parse_float)
        key_parent, key_stem = key[:-1], key[-1]
        if flags.is_(key, TomlFlags.FROZEN):
            raise toml_suffixed_err(src, pos, f'Cannot mutate immutable namespace {key}')
        try:
            nest = nested_dict.get_or_create_nest(key_parent, access_lists=False)
        except KeyError:
            raise toml_suffixed_err(src, pos, 'Cannot overwrite a value') from None
        if key_stem in nest:
            raise toml_suffixed_err(src, pos, f'Duplicate inline table key {key_stem!r}')
        nest[key_stem] = value
        pos = toml_skip_chars(src, pos, TOML_WS)
        c = src[pos:pos + 1]
        if c == '}':
            return pos + 1, nested_dict.dict
        if c != ',':
            raise toml_suffixed_err(src, pos, 'Unclosed inline table')
        if isinstance(value, (dict, list)):
            # Container values inside an inline table become immutable.
            flags.set(key, TomlFlags.FROZEN, recursive=True)
        pos += 1
        pos = toml_skip_chars(src, pos, TOML_WS)
675
+
676
+
677
def toml_parse_basic_str_escape(
        src: str,
        pos: TomlPos,
        *,
        multiline: bool = False,
) -> ta.Tuple[TomlPos, str]:
    """Parse one backslash escape at `pos`; returns the position after it and the replacement text."""
    escape_id = src[pos:pos + 2]
    pos += 2
    if multiline and escape_id in {'\\ ', '\\\t', '\\\n'}:
        # Skip whitespace until next non-whitespace character or end of the doc. Error if non-whitespace is found before
        # newline.
        if escape_id != '\\\n':
            pos = toml_skip_chars(src, pos, TOML_WS)
            try:
                char = src[pos]
            except IndexError:
                return pos, ''
            if char != '\n':
                raise toml_suffixed_err(src, pos, "Unescaped '\\' in a string")
            pos += 1
        pos = toml_skip_chars(src, pos, TOML_WS_AND_NEWLINE)
        return pos, ''
    if escape_id == '\\u':
        return toml_parse_hex_char(src, pos, 4)
    if escape_id == '\\U':
        return toml_parse_hex_char(src, pos, 8)
    try:
        return pos, TOML_BASIC_STR_ESCAPE_REPLACEMENTS[escape_id]
    except KeyError:
        raise toml_suffixed_err(src, pos, "Unescaped '\\' in a string") from None
707
+
708
+
709
def toml_parse_basic_str_escape_multiline(src: str, pos: TomlPos) -> ta.Tuple[TomlPos, str]:
    # Multiline variant of `toml_parse_basic_str_escape` (additionally allows line-ending backslash).
    return toml_parse_basic_str_escape(src, pos, multiline=True)
711
+
712
+
713
def toml_parse_hex_char(src: str, pos: TomlPos, hex_len: int) -> ta.Tuple[TomlPos, str]:
    """Parse `hex_len` hex digits at `pos` into the corresponding character (for \\u / \\U escapes)."""
    hex_str = src[pos:pos + hex_len]
    if len(hex_str) != hex_len or not TOML_HEXDIGIT_CHARS.issuperset(hex_str):
        raise toml_suffixed_err(src, pos, 'Invalid hex value')
    pos += hex_len
    hex_int = int(hex_str, 16)
    # Surrogate code points are not valid scalar values and may not be escaped.
    if not toml_is_unicode_scalar_value(hex_int):
        raise toml_suffixed_err(src, pos, 'Escaped character is not a Unicode scalar value')
    return pos, chr(hex_int)
722
+
723
+
724
def toml_parse_literal_str(src: str, pos: TomlPos) -> ta.Tuple[TomlPos, str]:
    """Parse a single-line literal ('...') string; `pos` is at the opening apostrophe."""
    pos += 1  # Skip starting apostrophe
    start_pos = pos
    pos = toml_skip_until(
        src, pos, "'", error_on=TOML_ILLEGAL_LITERAL_STR_CHARS, error_on_eof=True,
    )
    return pos + 1, src[start_pos:pos]  # Skip ending apostrophe
731
+
732
+
733
def toml_parse_multiline_str(src: str, pos: TomlPos, *, literal: bool) -> ta.Tuple[TomlPos, str]:
    """Parse a multiline string starting at its 3-char opening delimiter (''' when `literal`, else triple-quote)."""
    pos += 3
    if src.startswith('\n', pos):
        # A newline immediately following the opening delimiter is trimmed.
        pos += 1

    if literal:
        delim = "'"
        end_pos = toml_skip_until(
            src,
            pos,
            "'''",
            error_on=TOML_ILLEGAL_MULTILINE_LITERAL_STR_CHARS,
            error_on_eof=True,
        )
        result = src[pos:end_pos]
        pos = end_pos + 3
    else:
        delim = '"'
        pos, result = toml_parse_basic_str(src, pos, multiline=True)

    # Add at maximum two extra apostrophes/quotes if the end sequence is 4 or 5 chars long instead of just 3.
    if not src.startswith(delim, pos):
        return pos, result
    pos += 1
    if not src.startswith(delim, pos):
        return pos, result + delim
    pos += 1
    return pos, result + (delim * 2)
761
+
762
+
763
def toml_parse_basic_str(src: str, pos: TomlPos, *, multiline: bool) -> ta.Tuple[TomlPos, str]:
    """Parse a basic string body (after its opening quote); returns position after the closing quote(s) and the text."""
    if multiline:
        error_on = TOML_ILLEGAL_MULTILINE_BASIC_STR_CHARS
        parse_escapes = toml_parse_basic_str_escape_multiline
    else:
        error_on = TOML_ILLEGAL_BASIC_STR_CHARS
        parse_escapes = toml_parse_basic_str_escape
    result = ''
    start_pos = pos  # start of the current literal (escape-free) run
    while True:
        try:
            char = src[pos]
        except IndexError:
            raise toml_suffixed_err(src, pos, 'Unterminated string') from None
        if char == '"':
            if not multiline:
                return pos + 1, result + src[start_pos:pos]
            if src.startswith('"""', pos):
                return pos + 3, result + src[start_pos:pos]
            pos += 1
            continue
        if char == '\\':
            # Flush the literal run, expand the escape, and start a new run.
            result += src[start_pos:pos]
            pos, parsed_escape = parse_escapes(src, pos)
            result += parsed_escape
            start_pos = pos
            continue
        if char in error_on:
            raise toml_suffixed_err(src, pos, f'Illegal character {char!r}')
        pos += 1
793
+
794
+
795
def toml_parse_value(  # noqa: C901
        src: str,
        pos: TomlPos,
        parse_float: TomlParseFloat,
) -> ta.Tuple[TomlPos, ta.Any]:
    """Parse any TOML value starting at `pos`; returns the position after it and the Python value."""
    try:
        char: ta.Optional[str] = src[pos]
    except IndexError:
        char = None  # end of document

    # IMPORTANT: order conditions based on speed of checking and likelihood

    # Basic strings
    if char == '"':
        if src.startswith('"""', pos):
            return toml_parse_multiline_str(src, pos, literal=False)
        return toml_parse_one_line_basic_str(src, pos)

    # Literal strings
    if char == "'":
        if src.startswith("'''", pos):
            return toml_parse_multiline_str(src, pos, literal=True)
        return toml_parse_literal_str(src, pos)

    # Booleans
    if char == 't':
        if src.startswith('true', pos):
            return pos + 4, True
    if char == 'f':
        if src.startswith('false', pos):
            return pos + 5, False

    # Arrays
    if char == '[':
        return toml_parse_array(src, pos, parse_float)

    # Inline tables
    if char == '{':
        return toml_parse_inline_table(src, pos, parse_float)

    # Dates and times
    datetime_match = TOML_RE_DATETIME.match(src, pos)
    if datetime_match:
        try:
            datetime_obj = toml_match_to_datetime(datetime_match)
        except ValueError as e:
            raise toml_suffixed_err(src, pos, 'Invalid date or datetime') from e
        return datetime_match.end(), datetime_obj
    localtime_match = TOML_RE_LOCALTIME.match(src, pos)
    if localtime_match:
        return localtime_match.end(), toml_match_to_localtime(localtime_match)

    # Integers and "normal" floats. The regex will greedily match any type starting with a decimal char, so needs to be
    # located after handling of dates and times.
    number_match = TOML_RE_NUMBER.match(src, pos)
    if number_match:
        return number_match.end(), toml_match_to_number(number_match, parse_float)

    # Special floats
    first_three = src[pos:pos + 3]
    if first_three in {'inf', 'nan'}:
        return pos + 3, parse_float(first_three)
    first_four = src[pos:pos + 4]
    if first_four in {'-inf', '+inf', '-nan', '+nan'}:
        return pos + 4, parse_float(first_four)

    raise toml_suffixed_err(src, pos, 'Invalid value')
862
+
863
+
864
def toml_suffixed_err(src: str, pos: TomlPos, msg: str) -> TomlDecodeError:
    """Return a `TomlDecodeError` whose message is suffixed with line/column coordinates in `src`."""
    if pos >= len(src):
        where = 'end of document'
    else:
        line = src.count('\n', 0, pos) + 1
        # Column is 1-based; lines after the first are anchored on the preceding newline.
        column = pos + 1 if line == 1 else pos - src.rindex('\n', 0, pos)
        where = f'line {line}, column {column}'
    return TomlDecodeError(f'{msg} (at {where})')
878
+
879
+
880
def toml_is_unicode_scalar_value(codepoint: int) -> bool:
    """True if `codepoint` is a Unicode scalar value (in U+0000..U+10FFFF and not a surrogate)."""
    if codepoint < 0 or codepoint > 1114111:
        return False
    return not (55296 <= codepoint <= 57343)
882
+
883
+
884
def toml_make_safe_parse_float(parse_float: TomlParseFloat) -> TomlParseFloat:
    """Wrap `parse_float` so that it can never return a dict or list.

    Such return values would be confused with parsed TOML tables and arrays; the wrapper raises `ValueError` for them
    instead. The builtin `float` is returned unwrapped since it can only produce floats.
    """
    if parse_float is float:
        return float

    def checked_parse_float(float_str: str) -> ta.Any:
        value = parse_float(float_str)
        if isinstance(value, (dict, list)):
            raise ValueError('parse_float must not return dicts or lists')  # noqa
        return value

    return checked_parse_float
901
+
63
902
 
64
903
  ########################################
65
904
  # ../compat.py
@@ -215,6 +1054,19 @@ def close_fd(fd: int) -> bool:
215
1054
  return True
216
1055
 
217
1056
 
1057
def is_fd_open(fd: int) -> bool:
    """Check whether `fd` refers to an open file descriptor (by attempting to dup it)."""
    try:
        dup_fd = os.dup(fd)
    except OSError:
        return False
    else:
        os.close(dup_fd)
        return True
1064
+
1065
+
1066
def get_open_fds(limit: int) -> ta.FrozenSet[int]:
    """Return the set of open file descriptors in `range(limit)`."""
    return frozenset(fd for fd in range(limit) if is_fd_open(fd))
1068
+
1069
+
218
1070
  def mktempfile(suffix: str, prefix: str, dir: str) -> str: # noqa
219
1071
  fd, filename = tempfile.mkstemp(suffix, prefix, dir)
220
1072
  os.close(fd)
@@ -489,7 +1341,7 @@ class _cached_nullary: # noqa
489
1341
  return bound
490
1342
 
491
1343
 
492
- def cached_nullary(fn: ta.Callable[..., T]) -> ta.Callable[..., T]:
1344
def cached_nullary(fn):  # ta.Callable[..., T]) -> ta.Callable[..., T]:
    # Decorator: memoize a zero-argument callable via `_cached_nullary`.
    return _cached_nullary(fn)
494
1346
 
495
1347
 
@@ -580,6 +1432,50 @@ json_dump_compact: ta.Callable[..., bytes] = functools.partial(json.dump, **JSON
580
1432
  json_dumps_compact: ta.Callable[..., str] = functools.partial(json.dumps, **JSON_COMPACT_KWARGS)
581
1433
 
582
1434
 
1435
+ ########################################
1436
+ # ../../../omlish/lite/maybes.py
1437
+
1438
+
1439
+ class Maybe(ta.Generic[T]):
1440
+ @property
1441
+ @abc.abstractmethod
1442
+ def present(self) -> bool:
1443
+ raise NotImplementedError
1444
+
1445
+ @abc.abstractmethod
1446
+ def must(self) -> T:
1447
+ raise NotImplementedError
1448
+
1449
+ @classmethod
1450
+ def just(cls, v: T) -> 'Maybe[T]':
1451
+ return tuple.__new__(_Maybe, (v,)) # noqa
1452
+
1453
+ _empty: ta.ClassVar['Maybe']
1454
+
1455
+ @classmethod
1456
+ def empty(cls) -> 'Maybe[T]':
1457
+ return Maybe._empty
1458
+
1459
+
1460
+ class _Maybe(Maybe[T], tuple):
1461
+ __slots__ = ()
1462
+
1463
+ def __init_subclass__(cls, **kwargs):
1464
+ raise TypeError
1465
+
1466
+ @property
1467
+ def present(self) -> bool:
1468
+ return bool(self)
1469
+
1470
+ def must(self) -> T:
1471
+ if not self:
1472
+ raise ValueError
1473
+ return self[0]
1474
+
1475
+
1476
+ Maybe._empty = tuple.__new__(_Maybe, ()) # noqa
1477
+
1478
+
583
1479
  ########################################
584
1480
  # ../../../omlish/lite/reflect.py
585
1481
 
@@ -628,179 +1524,681 @@ def deep_subclasses(cls: ta.Type[T]) -> ta.Iterator[ta.Type[T]]:
628
1524
 
629
1525
 
630
1526
  ########################################
631
- # ../configs.py
1527
+ # ../states.py
1528
+
1529
+
1530
+ ##
1531
+
1532
+
1533
+ def _names_by_code(states: ta.Any) -> ta.Dict[int, str]:
1534
+ d = {}
1535
+ for name in states.__dict__:
1536
+ if not name.startswith('__'):
1537
+ code = getattr(states, name)
1538
+ d[code] = name
1539
+ return d
1540
+
1541
+
1542
+ ##
1543
+
1544
+
1545
+ class ProcessStates:
1546
+ STOPPED = 0
1547
+ STARTING = 10
1548
+ RUNNING = 20
1549
+ BACKOFF = 30
1550
+ STOPPING = 40
1551
+ EXITED = 100
1552
+ FATAL = 200
1553
+ UNKNOWN = 1000
1554
+
1555
+
1556
+ STOPPED_STATES = (
1557
+ ProcessStates.STOPPED,
1558
+ ProcessStates.EXITED,
1559
+ ProcessStates.FATAL,
1560
+ ProcessStates.UNKNOWN,
1561
+ )
1562
+
1563
+ RUNNING_STATES = (
1564
+ ProcessStates.RUNNING,
1565
+ ProcessStates.BACKOFF,
1566
+ ProcessStates.STARTING,
1567
+ )
1568
+
1569
+ SIGNALLABLE_STATES = (
1570
+ ProcessStates.RUNNING,
1571
+ ProcessStates.STARTING,
1572
+ ProcessStates.STOPPING,
1573
+ )
1574
+
1575
+
1576
+ _process_states_by_code = _names_by_code(ProcessStates)
1577
+
1578
+
1579
+ def get_process_state_description(code: ProcessState) -> str:
1580
+ return check_not_none(_process_states_by_code.get(code))
1581
+
1582
+
1583
+ ##
1584
+
1585
+
1586
+ class SupervisorStates:
1587
+ FATAL = 2
1588
+ RUNNING = 1
1589
+ RESTARTING = 0
1590
+ SHUTDOWN = -1
1591
+
1592
+
1593
+ _supervisor_states_by_code = _names_by_code(SupervisorStates)
1594
+
1595
+
1596
+ def get_supervisor_state_description(code: SupervisorState) -> str:
1597
+ return check_not_none(_supervisor_states_by_code.get(code))
1598
+
1599
+
1600
+ ########################################
1601
+ # ../../../omlish/lite/inject.py
1602
+
1603
+
1604
+ ###
1605
+ # types
632
1606
 
633
1607
 
634
1608
  @dc.dataclass(frozen=True)
635
- class ProcessConfig:
636
- name: str
637
- command: str
1609
+ class InjectorKey:
1610
+ cls: InjectorKeyCls
1611
+ tag: ta.Any = None
1612
+ array: bool = False
638
1613
 
639
- uid: ta.Optional[int] = None
640
- directory: ta.Optional[str] = None
641
- umask: ta.Optional[int] = None
642
- priority: int = 999
643
1614
 
644
- autostart: bool = True
645
- autorestart: str = 'unexpected'
1615
+ ##
646
1616
 
647
- startsecs: int = 1
648
- startretries: int = 3
649
1617
 
650
- numprocs: int = 1
651
- numprocs_start: int = 0
1618
+ class InjectorProvider(abc.ABC):
1619
+ @abc.abstractmethod
1620
+ def provider_fn(self) -> InjectorProviderFn:
1621
+ raise NotImplementedError
652
1622
 
653
- @dc.dataclass(frozen=True)
654
- class Log:
655
- file: ta.Optional[str] = None
656
- capture_maxbytes: ta.Optional[int] = None
657
- events_enabled: bool = False
658
- syslog: bool = False
659
- backups: ta.Optional[int] = None
660
- maxbytes: ta.Optional[int] = None
661
1623
 
662
- stdout: Log = Log()
663
- stderr: Log = Log()
1624
+ ##
664
1625
 
665
- stopsignal: int = signal.SIGTERM
666
- stopwaitsecs: int = 10
667
- stopasgroup: bool = False
668
1626
 
669
- killasgroup: bool = False
1627
+ @dc.dataclass(frozen=True)
1628
+ class InjectorBinding:
1629
+ key: InjectorKey
1630
+ provider: InjectorProvider
670
1631
 
671
- exitcodes: ta.Sequence[int] = (0,)
672
1632
 
673
- redirect_stderr: bool = False
1633
+ class InjectorBindings(abc.ABC):
1634
+ @abc.abstractmethod
1635
+ def bindings(self) -> ta.Iterator[InjectorBinding]:
1636
+ raise NotImplementedError
674
1637
 
675
- environment: ta.Optional[ta.Mapping[str, str]] = None
1638
+ ##
1639
+
1640
+
1641
+ class Injector(abc.ABC):
1642
+ @abc.abstractmethod
1643
+ def try_provide(self, key: ta.Any) -> Maybe[ta.Any]:
1644
+ raise NotImplementedError
1645
+
1646
+ @abc.abstractmethod
1647
+ def provide(self, key: ta.Any) -> ta.Any:
1648
+ raise NotImplementedError
1649
+
1650
+ @abc.abstractmethod
1651
+ def provide_kwargs(self, obj: ta.Any) -> ta.Mapping[str, ta.Any]:
1652
+ raise NotImplementedError
1653
+
1654
+ @abc.abstractmethod
1655
+ def inject(self, obj: ta.Any) -> ta.Any:
1656
+ raise NotImplementedError
1657
+
1658
+
1659
+ ###
1660
+ # exceptions
676
1661
 
677
1662
 
678
1663
  @dc.dataclass(frozen=True)
679
- class ProcessGroupConfig:
1664
+ class InjectorKeyError(Exception):
1665
+ key: InjectorKey
1666
+
1667
+ source: ta.Any = None
1668
+ name: ta.Optional[str] = None
1669
+
1670
+
1671
+ @dc.dataclass(frozen=True)
1672
+ class UnboundInjectorKeyError(InjectorKeyError):
1673
+ pass
1674
+
1675
+
1676
+ @dc.dataclass(frozen=True)
1677
+ class DuplicateInjectorKeyError(InjectorKeyError):
1678
+ pass
1679
+
1680
+
1681
+ ###
1682
+ # keys
1683
+
1684
+
1685
+ def as_injector_key(o: ta.Any) -> InjectorKey:
1686
+ if o is inspect.Parameter.empty:
1687
+ raise TypeError(o)
1688
+ if isinstance(o, InjectorKey):
1689
+ return o
1690
+ if isinstance(o, (type, ta.NewType)):
1691
+ return InjectorKey(o)
1692
+ raise TypeError(o)
1693
+
1694
+
1695
+ ###
1696
+ # providers
1697
+
1698
+
1699
+ @dc.dataclass(frozen=True)
1700
+ class FnInjectorProvider(InjectorProvider):
1701
+ fn: ta.Any
1702
+
1703
+ def __post_init__(self) -> None:
1704
+ check_not_isinstance(self.fn, type)
1705
+
1706
+ def provider_fn(self) -> InjectorProviderFn:
1707
+ def pfn(i: Injector) -> ta.Any:
1708
+ return i.inject(self.fn)
1709
+
1710
+ return pfn
1711
+
1712
+
1713
+ @dc.dataclass(frozen=True)
1714
+ class CtorInjectorProvider(InjectorProvider):
1715
+ cls: type
1716
+
1717
+ def __post_init__(self) -> None:
1718
+ check_isinstance(self.cls, type)
1719
+
1720
+ def provider_fn(self) -> InjectorProviderFn:
1721
+ def pfn(i: Injector) -> ta.Any:
1722
+ return i.inject(self.cls)
1723
+
1724
+ return pfn
1725
+
1726
+
1727
+ @dc.dataclass(frozen=True)
1728
+ class ConstInjectorProvider(InjectorProvider):
1729
+ v: ta.Any
1730
+
1731
+ def provider_fn(self) -> InjectorProviderFn:
1732
+ return lambda _: self.v
1733
+
1734
+
1735
+ @dc.dataclass(frozen=True)
1736
+ class SingletonInjectorProvider(InjectorProvider):
1737
+ p: InjectorProvider
1738
+
1739
+ def __post_init__(self) -> None:
1740
+ check_isinstance(self.p, InjectorProvider)
1741
+
1742
+ def provider_fn(self) -> InjectorProviderFn:
1743
+ v = not_set = object()
1744
+
1745
+ def pfn(i: Injector) -> ta.Any:
1746
+ nonlocal v
1747
+ if v is not_set:
1748
+ v = ufn(i)
1749
+ return v
1750
+
1751
+ ufn = self.p.provider_fn()
1752
+ return pfn
1753
+
1754
+
1755
+ @dc.dataclass(frozen=True)
1756
+ class LinkInjectorProvider(InjectorProvider):
1757
+ k: InjectorKey
1758
+
1759
+ def __post_init__(self) -> None:
1760
+ check_isinstance(self.k, InjectorKey)
1761
+
1762
+ def provider_fn(self) -> InjectorProviderFn:
1763
+ def pfn(i: Injector) -> ta.Any:
1764
+ return i.provide(self.k)
1765
+
1766
+ return pfn
1767
+
1768
+
1769
+ @dc.dataclass(frozen=True)
1770
+ class ArrayInjectorProvider(InjectorProvider):
1771
+ ps: ta.Sequence[InjectorProvider]
1772
+
1773
+ def provider_fn(self) -> InjectorProviderFn:
1774
+ ps = [p.provider_fn() for p in self.ps]
1775
+
1776
+ def pfn(i: Injector) -> ta.Any:
1777
+ rv = []
1778
+ for ep in ps:
1779
+ o = ep(i)
1780
+ rv.append(o)
1781
+ return rv
1782
+
1783
+ return pfn
1784
+
1785
+
1786
+ ###
1787
+ # bindings
1788
+
1789
+
1790
+ @dc.dataclass(frozen=True)
1791
+ class _InjectorBindings(InjectorBindings):
1792
+ bs: ta.Optional[ta.Sequence[InjectorBinding]] = None
1793
+ ps: ta.Optional[ta.Sequence[InjectorBindings]] = None
1794
+
1795
+ def bindings(self) -> ta.Iterator[InjectorBinding]:
1796
+ if self.bs is not None:
1797
+ yield from self.bs
1798
+ if self.ps is not None:
1799
+ for p in self.ps:
1800
+ yield from p.bindings()
1801
+
1802
+
1803
+ def as_injector_bindings(*args: InjectorBindingOrBindings) -> InjectorBindings:
1804
+ bs: ta.List[InjectorBinding] = []
1805
+ ps: ta.List[InjectorBindings] = []
1806
+ for a in args:
1807
+ if isinstance(a, InjectorBindings):
1808
+ ps.append(a)
1809
+ elif isinstance(a, InjectorBinding):
1810
+ bs.append(a)
1811
+ else:
1812
+ raise TypeError(a)
1813
+ return _InjectorBindings(
1814
+ bs or None,
1815
+ ps or None,
1816
+ )
1817
+
1818
+
1819
+ ##
1820
+
1821
+
1822
+ @dc.dataclass(frozen=True)
1823
+ class OverridesInjectorBindings(InjectorBindings):
1824
+ p: InjectorBindings
1825
+ m: ta.Mapping[InjectorKey, InjectorBinding]
1826
+
1827
+ def bindings(self) -> ta.Iterator[InjectorBinding]:
1828
+ for b in self.p.bindings():
1829
+ yield self.m.get(b.key, b)
1830
+
1831
+
1832
+ def injector_override(p: InjectorBindings, *args: InjectorBindingOrBindings) -> InjectorBindings:
1833
+ m: ta.Dict[InjectorKey, InjectorBinding] = {}
1834
+ for b in as_injector_bindings(*args).bindings():
1835
+ if b.key in m:
1836
+ raise DuplicateInjectorKeyError(b.key)
1837
+ m[b.key] = b
1838
+ return OverridesInjectorBindings(p, m)
1839
+
1840
+
1841
+ ##
1842
+
1843
+
1844
+ def build_injector_provider_map(bs: InjectorBindings) -> ta.Mapping[InjectorKey, InjectorProvider]:
1845
+ pm: ta.Dict[InjectorKey, InjectorProvider] = {}
1846
+ am: ta.Dict[InjectorKey, ta.List[InjectorProvider]] = {}
1847
+
1848
+ for b in bs.bindings():
1849
+ if b.key.array:
1850
+ am.setdefault(b.key, []).append(b.provider)
1851
+ else:
1852
+ if b.key in pm:
1853
+ raise KeyError(b.key)
1854
+ pm[b.key] = b.provider
1855
+
1856
+ if am:
1857
+ for k, aps in am.items():
1858
+ pm[k] = ArrayInjectorProvider(aps)
1859
+
1860
+ return pm
1861
+
1862
+
1863
+ ###
1864
+ # inspection
1865
+
1866
+
1867
+ _INJECTION_SIGNATURE_CACHE: ta.MutableMapping[ta.Any, inspect.Signature] = weakref.WeakKeyDictionary()
1868
+
1869
+
1870
+ def _injection_signature(obj: ta.Any) -> inspect.Signature:
1871
+ try:
1872
+ return _INJECTION_SIGNATURE_CACHE[obj]
1873
+ except TypeError:
1874
+ return inspect.signature(obj)
1875
+ except KeyError:
1876
+ pass
1877
+ sig = inspect.signature(obj)
1878
+ _INJECTION_SIGNATURE_CACHE[obj] = sig
1879
+ return sig
1880
+
1881
+
1882
+ class InjectionKwarg(ta.NamedTuple):
680
1883
  name: str
1884
+ key: InjectorKey
1885
+ has_default: bool
1886
+
1887
+
1888
+ class InjectionKwargsTarget(ta.NamedTuple):
1889
+ obj: ta.Any
1890
+ kwargs: ta.Sequence[InjectionKwarg]
1891
+
1892
+
1893
+ def build_injection_kwargs_target(
1894
+ obj: ta.Any,
1895
+ *,
1896
+ skip_args: int = 0,
1897
+ skip_kwargs: ta.Optional[ta.Iterable[ta.Any]] = None,
1898
+ raw_optional: bool = False,
1899
+ ) -> InjectionKwargsTarget:
1900
+ sig = _injection_signature(obj)
1901
+
1902
+ seen: ta.Set[InjectorKey] = set(map(as_injector_key, skip_kwargs)) if skip_kwargs is not None else set()
1903
+ kws: ta.List[InjectionKwarg] = []
1904
+ for p in list(sig.parameters.values())[skip_args:]:
1905
+ if p.annotation is inspect.Signature.empty:
1906
+ if p.default is not inspect.Parameter.empty:
1907
+ raise KeyError(f'{obj}, {p.name}')
1908
+ continue
1909
+
1910
+ if p.kind not in (inspect.Parameter.POSITIONAL_OR_KEYWORD, inspect.Parameter.KEYWORD_ONLY):
1911
+ raise TypeError(sig)
1912
+
1913
+ ann = p.annotation
1914
+ if (
1915
+ not raw_optional and
1916
+ is_optional_alias(ann)
1917
+ ):
1918
+ ann = get_optional_alias_arg(ann)
1919
+
1920
+ k = as_injector_key(ann)
1921
+
1922
+ if k in seen:
1923
+ raise DuplicateInjectorKeyError(k)
1924
+ seen.add(k)
1925
+
1926
+ kws.append(InjectionKwarg(
1927
+ p.name,
1928
+ k,
1929
+ p.default is not inspect.Parameter.empty,
1930
+ ))
1931
+
1932
+ return InjectionKwargsTarget(
1933
+ obj,
1934
+ kws,
1935
+ )
1936
+
1937
+
1938
+ ###
1939
+ # binder
1940
+
1941
+
1942
+ class InjectorBinder:
1943
+ def __new__(cls, *args, **kwargs): # noqa
1944
+ raise TypeError
1945
+
1946
+ _FN_TYPES: ta.Tuple[type, ...] = (
1947
+ types.FunctionType,
1948
+ types.MethodType,
1949
+
1950
+ classmethod,
1951
+ staticmethod,
1952
+
1953
+ functools.partial,
1954
+ functools.partialmethod,
1955
+ )
1956
+
1957
+ @classmethod
1958
+ def _is_fn(cls, obj: ta.Any) -> bool:
1959
+ return isinstance(obj, cls._FN_TYPES)
1960
+
1961
+ @classmethod
1962
+ def bind_as_fn(cls, icls: ta.Type[T]) -> ta.Type[T]:
1963
+ check_isinstance(icls, type)
1964
+ if icls not in cls._FN_TYPES:
1965
+ cls._FN_TYPES = (*cls._FN_TYPES, icls)
1966
+ return icls
1967
+
1968
+ _BANNED_BIND_TYPES: ta.Tuple[type, ...] = (
1969
+ InjectorProvider,
1970
+ )
1971
+
1972
+ @classmethod
1973
+ def bind(
1974
+ cls,
1975
+ obj: ta.Any,
1976
+ *,
1977
+ key: ta.Any = None,
1978
+ tag: ta.Any = None,
1979
+ array: ta.Optional[bool] = None, # noqa
1980
+
1981
+ to_fn: ta.Any = None,
1982
+ to_ctor: ta.Any = None,
1983
+ to_const: ta.Any = None,
1984
+ to_key: ta.Any = None,
1985
+
1986
+ singleton: bool = False,
1987
+ ) -> InjectorBinding:
1988
+ if obj is None or obj is inspect.Parameter.empty:
1989
+ raise TypeError(obj)
1990
+ if isinstance(obj, cls._BANNED_BIND_TYPES):
1991
+ raise TypeError(obj)
1992
+
1993
+ ##
1994
+
1995
+ if key is not None:
1996
+ key = as_injector_key(key)
1997
+
1998
+ ##
1999
+
2000
+ has_to = (
2001
+ to_fn is not None or
2002
+ to_ctor is not None or
2003
+ to_const is not None or
2004
+ to_key is not None
2005
+ )
2006
+ if isinstance(obj, InjectorKey):
2007
+ if key is None:
2008
+ key = obj
2009
+ elif isinstance(obj, type):
2010
+ if not has_to:
2011
+ to_ctor = obj
2012
+ if key is None:
2013
+ key = InjectorKey(obj)
2014
+ elif cls._is_fn(obj) and not has_to:
2015
+ to_fn = obj
2016
+ if key is None:
2017
+ sig = _injection_signature(obj)
2018
+ ty = check_isinstance(sig.return_annotation, type)
2019
+ key = InjectorKey(ty)
2020
+ else:
2021
+ if to_const is not None:
2022
+ raise TypeError('Cannot bind instance with to_const')
2023
+ to_const = obj
2024
+ if key is None:
2025
+ key = InjectorKey(type(obj))
2026
+ del has_to
2027
+
2028
+ ##
2029
+
2030
+ if tag is not None:
2031
+ if key.tag is not None:
2032
+ raise TypeError('Tag already set')
2033
+ key = dc.replace(key, tag=tag)
2034
+
2035
+ if array is not None:
2036
+ key = dc.replace(key, array=array)
2037
+
2038
+ ##
2039
+
2040
+ providers: ta.List[InjectorProvider] = []
2041
+ if to_fn is not None:
2042
+ providers.append(FnInjectorProvider(to_fn))
2043
+ if to_ctor is not None:
2044
+ providers.append(CtorInjectorProvider(to_ctor))
2045
+ if to_const is not None:
2046
+ providers.append(ConstInjectorProvider(to_const))
2047
+ if to_key is not None:
2048
+ providers.append(LinkInjectorProvider(as_injector_key(to_key)))
2049
+ if not providers:
2050
+ raise TypeError('Must specify provider')
2051
+ if len(providers) > 1:
2052
+ raise TypeError('May not specify multiple providers')
2053
+ provider, = providers
2054
+
2055
+ ##
2056
+
2057
+ if singleton:
2058
+ provider = SingletonInjectorProvider(provider)
2059
+
2060
+ ##
2061
+
2062
+ binding = InjectorBinding(key, provider)
2063
+
2064
+ ##
2065
+
2066
+ return binding
681
2067
 
682
- priority: int = 999
683
2068
 
684
- processes: ta.Optional[ta.Sequence[ProcessConfig]] = None
2069
+ ###
2070
+ # injector
685
2071
 
686
2072
 
687
- @dc.dataclass(frozen=True)
688
- class ServerConfig:
689
- user: ta.Optional[str] = None
690
- nodaemon: bool = False
691
- umask: int = 0o22
692
- directory: ta.Optional[str] = None
693
- logfile: str = 'supervisord.log'
694
- logfile_maxbytes: int = 50 * 1024 * 1024
695
- logfile_backups: int = 10
696
- loglevel: int = logging.INFO
697
- pidfile: str = 'supervisord.pid'
698
- identifier: str = 'supervisor'
699
- child_logdir: str = '/dev/null'
700
- minfds: int = 1024
701
- minprocs: int = 200
702
- nocleanup: bool = False
703
- strip_ansi: bool = False
704
- silent: bool = False
2073
+ _INJECTOR_INJECTOR_KEY = InjectorKey(Injector)
705
2074
 
706
- groups: ta.Optional[ta.Sequence[ProcessGroupConfig]] = None
707
2075
 
708
- @classmethod
709
- def new(
710
- cls,
711
- umask: ta.Union[int, str] = 0o22,
712
- directory: ta.Optional[str] = None,
713
- logfile: str = 'supervisord.log',
714
- logfile_maxbytes: ta.Union[int, str] = 50 * 1024 * 1024,
715
- loglevel: ta.Union[int, str] = logging.INFO,
716
- pidfile: str = 'supervisord.pid',
717
- child_logdir: ta.Optional[str] = None,
718
- **kwargs: ta.Any,
719
- ) -> 'ServerConfig':
720
- return cls(
721
- umask=octal_type(umask),
722
- directory=existing_directory(directory) if directory is not None else None,
723
- logfile=existing_dirpath(logfile),
724
- logfile_maxbytes=byte_size(logfile_maxbytes),
725
- loglevel=logging_level(loglevel),
726
- pidfile=existing_dirpath(pidfile),
727
- child_logdir=child_logdir if child_logdir else tempfile.gettempdir(),
728
- **kwargs,
729
- )
2076
+ class _Injector(Injector):
2077
+ def __init__(self, bs: InjectorBindings, p: ta.Optional[Injector] = None) -> None:
2078
+ super().__init__()
730
2079
 
2080
+ self._bs = check_isinstance(bs, InjectorBindings)
2081
+ self._p: ta.Optional[Injector] = check_isinstance(p, (Injector, type(None)))
731
2082
 
732
- ########################################
733
- # ../states.py
2083
+ self._pfm = {k: v.provider_fn() for k, v in build_injector_provider_map(bs).items()}
734
2084
 
2085
+ if _INJECTOR_INJECTOR_KEY in self._pfm:
2086
+ raise DuplicateInjectorKeyError(_INJECTOR_INJECTOR_KEY)
735
2087
 
736
- ##
2088
+ def try_provide(self, key: ta.Any) -> Maybe[ta.Any]:
2089
+ key = as_injector_key(key)
737
2090
 
2091
+ if key == _INJECTOR_INJECTOR_KEY:
2092
+ return Maybe.just(self)
738
2093
 
739
- def _names_by_code(states: ta.Any) -> ta.Dict[int, str]:
740
- d = {}
741
- for name in states.__dict__:
742
- if not name.startswith('__'):
743
- code = getattr(states, name)
744
- d[code] = name
745
- return d
2094
+ fn = self._pfm.get(key)
2095
+ if fn is not None:
2096
+ return Maybe.just(fn(self))
746
2097
 
2098
+ if self._p is not None:
2099
+ pv = self._p.try_provide(key)
2100
+ if pv is not None:
2101
+ return Maybe.empty()
747
2102
 
748
- ##
2103
+ return Maybe.empty()
749
2104
 
2105
+ def provide(self, key: ta.Any) -> ta.Any:
2106
+ v = self.try_provide(key)
2107
+ if v.present:
2108
+ return v.must()
2109
+ raise UnboundInjectorKeyError(key)
750
2110
 
751
- class ProcessStates:
752
- STOPPED = 0
753
- STARTING = 10
754
- RUNNING = 20
755
- BACKOFF = 30
756
- STOPPING = 40
757
- EXITED = 100
758
- FATAL = 200
759
- UNKNOWN = 1000
2111
+ def provide_kwargs(self, obj: ta.Any) -> ta.Mapping[str, ta.Any]:
2112
+ kt = build_injection_kwargs_target(obj)
2113
+ ret: ta.Dict[str, ta.Any] = {}
2114
+ for kw in kt.kwargs:
2115
+ if kw.has_default:
2116
+ if not (mv := self.try_provide(kw.key)).present:
2117
+ continue
2118
+ v = mv.must()
2119
+ else:
2120
+ v = self.provide(kw.key)
2121
+ ret[kw.name] = v
2122
+ return ret
760
2123
 
2124
+ def inject(self, obj: ta.Any) -> ta.Any:
2125
+ kws = self.provide_kwargs(obj)
2126
+ return obj(**kws)
761
2127
 
762
- STOPPED_STATES = (
763
- ProcessStates.STOPPED,
764
- ProcessStates.EXITED,
765
- ProcessStates.FATAL,
766
- ProcessStates.UNKNOWN,
767
- )
768
2128
 
769
- RUNNING_STATES = (
770
- ProcessStates.RUNNING,
771
- ProcessStates.BACKOFF,
772
- ProcessStates.STARTING,
773
- )
2129
+ ###
2130
+ # injection helpers
774
2131
 
775
- SIGNALLABLE_STATES = (
776
- ProcessStates.RUNNING,
777
- ProcessStates.STARTING,
778
- ProcessStates.STOPPING,
779
- )
780
2132
 
2133
+ class Injection:
2134
+ def __new__(cls, *args, **kwargs): # noqa
2135
+ raise TypeError
781
2136
 
782
- _process_states_by_code = _names_by_code(ProcessStates)
2137
+ # keys
783
2138
 
2139
+ @classmethod
2140
+ def as_key(cls, o: ta.Any) -> InjectorKey:
2141
+ return as_injector_key(o)
784
2142
 
785
- def get_process_state_description(code: ProcessState) -> str:
786
- return check_not_none(_process_states_by_code.get(code))
2143
+ @classmethod
2144
+ def array(cls, o: ta.Any) -> InjectorKey:
2145
+ return dc.replace(as_injector_key(o), array=True)
787
2146
 
2147
+ @classmethod
2148
+ def tag(cls, o: ta.Any, t: ta.Any) -> InjectorKey:
2149
+ return dc.replace(as_injector_key(o), tag=t)
788
2150
 
789
- ##
2151
+ # bindings
790
2152
 
2153
+ @classmethod
2154
+ def as_bindings(cls, *args: InjectorBindingOrBindings) -> InjectorBindings:
2155
+ return as_injector_bindings(*args)
791
2156
 
792
- class SupervisorStates:
793
- FATAL = 2
794
- RUNNING = 1
795
- RESTARTING = 0
796
- SHUTDOWN = -1
2157
+ @classmethod
2158
+ def override(cls, p: InjectorBindings, *args: InjectorBindingOrBindings) -> InjectorBindings:
2159
+ return injector_override(p, *args)
797
2160
 
2161
+ # binder
798
2162
 
799
- _supervisor_states_by_code = _names_by_code(SupervisorStates)
2163
+ @classmethod
2164
+ def bind(
2165
+ cls,
2166
+ obj: ta.Any,
2167
+ *,
2168
+ key: ta.Any = None,
2169
+ tag: ta.Any = None,
2170
+ array: ta.Optional[bool] = None, # noqa
2171
+
2172
+ to_fn: ta.Any = None,
2173
+ to_ctor: ta.Any = None,
2174
+ to_const: ta.Any = None,
2175
+ to_key: ta.Any = None,
2176
+
2177
+ singleton: bool = False,
2178
+ ) -> InjectorBinding:
2179
+ return InjectorBinder.bind(
2180
+ obj,
2181
+
2182
+ key=key,
2183
+ tag=tag,
2184
+ array=array,
2185
+
2186
+ to_fn=to_fn,
2187
+ to_ctor=to_ctor,
2188
+ to_const=to_const,
2189
+ to_key=to_key,
2190
+
2191
+ singleton=singleton,
2192
+ )
800
2193
 
2194
+ # injector
801
2195
 
802
- def get_supervisor_state_description(code: SupervisorState) -> str:
803
- return check_not_none(_supervisor_states_by_code.get(code))
2196
+ @classmethod
2197
+ def create_injector(cls, *args: InjectorBindingOrBindings, p: ta.Optional[Injector] = None) -> Injector:
2198
+ return _Injector(as_injector_bindings(*args), p)
2199
+
2200
+
2201
+ inj = Injection
804
2202
 
805
2203
 
806
2204
  ########################################
@@ -1575,6 +2973,66 @@ def unmarshal_obj(o: ta.Any, ty: ta.Union[ta.Type[T], ta.Any]) -> T:
1575
2973
  return get_obj_marshaler(ty).unmarshal(o)
1576
2974
 
1577
2975
 
2976
+ ########################################
2977
+ # ../../configs.py
2978
+
2979
+
2980
+ def read_config_file(
2981
+ path: str,
2982
+ cls: ta.Type[T],
2983
+ *,
2984
+ prepare: ta.Optional[ta.Callable[[ConfigMapping], ConfigMapping]] = None,
2985
+ ) -> T:
2986
+ with open(path) as cf:
2987
+ if path.endswith('.toml'):
2988
+ config_dct = toml_loads(cf.read())
2989
+ else:
2990
+ config_dct = json.loads(cf.read())
2991
+
2992
+ if prepare is not None:
2993
+ config_dct = prepare(config_dct) # type: ignore
2994
+
2995
+ return unmarshal_obj(config_dct, cls)
2996
+
2997
+
2998
+ def build_config_named_children(
2999
+ o: ta.Union[
3000
+ ta.Sequence[ConfigMapping],
3001
+ ta.Mapping[str, ConfigMapping],
3002
+ None,
3003
+ ],
3004
+ *,
3005
+ name_key: str = 'name',
3006
+ ) -> ta.Optional[ta.Sequence[ConfigMapping]]:
3007
+ if o is None:
3008
+ return None
3009
+
3010
+ lst: ta.List[ConfigMapping] = []
3011
+ if isinstance(o, ta.Mapping):
3012
+ for k, v in o.items():
3013
+ check_isinstance(v, ta.Mapping)
3014
+ if name_key in v:
3015
+ n = v[name_key]
3016
+ if k != n:
3017
+ raise KeyError(f'Given names do not match: {n} != {k}')
3018
+ lst.append(v)
3019
+ else:
3020
+ lst.append({name_key: k, **v})
3021
+
3022
+ else:
3023
+ check_not_isinstance(o, str)
3024
+ lst.extend(o)
3025
+
3026
+ seen = set()
3027
+ for d in lst:
3028
+ n = d['name']
3029
+ if n in d:
3030
+ raise KeyError(f'Duplicate name: {n}')
3031
+ seen.add(n)
3032
+
3033
+ return lst
3034
+
3035
+
1578
3036
  ########################################
1579
3037
  # ../events.py
1580
3038
 
@@ -1602,12 +3060,9 @@ class EventCallbacks:
1602
3060
 
1603
3061
  EVENT_CALLBACKS = EventCallbacks()
1604
3062
 
1605
- notify_event = EVENT_CALLBACKS.notify
1606
- clear_events = EVENT_CALLBACKS.clear
1607
-
1608
3063
 
1609
3064
  class Event(abc.ABC): # noqa
1610
- """Abstract event type """
3065
+ """Abstract event type."""
1611
3066
 
1612
3067
 
1613
3068
  class ProcessLogEvent(Event, abc.ABC):
@@ -1687,7 +3142,7 @@ class RemoteCommunicationEvent(Event):
1687
3142
 
1688
3143
 
1689
3144
  class SupervisorStateChangeEvent(Event):
1690
- """ Abstract class """
3145
+ """Abstract class."""
1691
3146
 
1692
3147
  def payload(self):
1693
3148
  return ''
@@ -1709,7 +3164,7 @@ class EventRejectedEvent: # purposely does not subclass Event
1709
3164
 
1710
3165
 
1711
3166
  class ProcessStateEvent(Event):
1712
- """ Abstract class, never raised directly """
3167
+ """Abstract class, never raised directly."""
1713
3168
  frm = None
1714
3169
  to = None
1715
3170
 
@@ -1798,7 +3253,7 @@ class ProcessGroupRemovedEvent(ProcessGroupEvent):
1798
3253
 
1799
3254
 
1800
3255
  class TickEvent(Event):
1801
- """ Abstract """
3256
+ """Abstract."""
1802
3257
 
1803
3258
  def __init__(self, when, supervisord):
1804
3259
  super().__init__()
@@ -2080,34 +3535,155 @@ if sys.platform == 'darwin' or sys.platform.startswith('freebsd'):
2080
3535
 
2081
3536
  return readables, writables
2082
3537
 
2083
- def before_daemonize(self) -> None:
2084
- self.close()
3538
+ def before_daemonize(self) -> None:
3539
+ self.close()
3540
+
3541
+ def after_daemonize(self) -> None:
3542
+ self._kqueue = select.kqueue()
3543
+ for fd in self._readables:
3544
+ self.register_readable(fd)
3545
+ for fd in self._writables:
3546
+ self.register_writable(fd)
3547
+
3548
+ def close(self) -> None:
3549
+ self._kqueue.close() # type: ignore
3550
+ self._kqueue = None
3551
+
3552
+ else:
3553
+ KqueuePoller = None
3554
+
3555
+
3556
+ Poller: ta.Type[BasePoller]
3557
+ if (
3558
+ sys.platform == 'darwin' or sys.platform.startswith('freebsd') and
3559
+ hasattr(select, 'kqueue') and KqueuePoller is not None
3560
+ ):
3561
+ Poller = KqueuePoller
3562
+ elif hasattr(select, 'poll'):
3563
+ Poller = PollPoller
3564
+ else:
3565
+ Poller = SelectPoller
3566
+
3567
+
3568
+ ########################################
3569
+ # ../configs.py
3570
+
3571
+
3572
+ ##
3573
+
3574
+
3575
+ @dc.dataclass(frozen=True)
3576
+ class ProcessConfig:
3577
+ name: str
3578
+ command: str
3579
+
3580
+ uid: ta.Optional[int] = None
3581
+ directory: ta.Optional[str] = None
3582
+ umask: ta.Optional[int] = None
3583
+ priority: int = 999
3584
+
3585
+ autostart: bool = True
3586
+ autorestart: str = 'unexpected'
3587
+
3588
+ startsecs: int = 1
3589
+ startretries: int = 3
3590
+
3591
+ numprocs: int = 1
3592
+ numprocs_start: int = 0
3593
+
3594
+ @dc.dataclass(frozen=True)
3595
+ class Log:
3596
+ file: ta.Optional[str] = None
3597
+ capture_maxbytes: ta.Optional[int] = None
3598
+ events_enabled: bool = False
3599
+ syslog: bool = False
3600
+ backups: ta.Optional[int] = None
3601
+ maxbytes: ta.Optional[int] = None
3602
+
3603
+ stdout: Log = Log()
3604
+ stderr: Log = Log()
3605
+
3606
+ stopsignal: int = signal.SIGTERM
3607
+ stopwaitsecs: int = 10
3608
+ stopasgroup: bool = False
3609
+
3610
+ killasgroup: bool = False
3611
+
3612
+ exitcodes: ta.Sequence[int] = (0,)
3613
+
3614
+ redirect_stderr: bool = False
3615
+
3616
+ environment: ta.Optional[ta.Mapping[str, str]] = None
3617
+
3618
+
3619
+ @dc.dataclass(frozen=True)
3620
+ class ProcessGroupConfig:
3621
+ name: str
3622
+
3623
+ priority: int = 999
3624
+
3625
+ processes: ta.Optional[ta.Sequence[ProcessConfig]] = None
3626
+
3627
+
3628
+ @dc.dataclass(frozen=True)
3629
+ class ServerConfig:
3630
+ user: ta.Optional[str] = None
3631
+ nodaemon: bool = False
3632
+ umask: int = 0o22
3633
+ directory: ta.Optional[str] = None
3634
+ logfile: str = 'supervisord.log'
3635
+ logfile_maxbytes: int = 50 * 1024 * 1024
3636
+ logfile_backups: int = 10
3637
+ loglevel: int = logging.INFO
3638
+ pidfile: str = 'supervisord.pid'
3639
+ identifier: str = 'supervisor'
3640
+ child_logdir: str = '/dev/null'
3641
+ minfds: int = 1024
3642
+ minprocs: int = 200
3643
+ nocleanup: bool = False
3644
+ strip_ansi: bool = False
3645
+ silent: bool = False
3646
+
3647
+ groups: ta.Optional[ta.Sequence[ProcessGroupConfig]] = None
3648
+
3649
+ @classmethod
3650
+ def new(
3651
+ cls,
3652
+ umask: ta.Union[int, str] = 0o22,
3653
+ directory: ta.Optional[str] = None,
3654
+ logfile: str = 'supervisord.log',
3655
+ logfile_maxbytes: ta.Union[int, str] = 50 * 1024 * 1024,
3656
+ loglevel: ta.Union[int, str] = logging.INFO,
3657
+ pidfile: str = 'supervisord.pid',
3658
+ child_logdir: ta.Optional[str] = None,
3659
+ **kwargs: ta.Any,
3660
+ ) -> 'ServerConfig':
3661
+ return cls(
3662
+ umask=octal_type(umask),
3663
+ directory=existing_directory(directory) if directory is not None else None,
3664
+ logfile=existing_dirpath(logfile),
3665
+ logfile_maxbytes=byte_size(logfile_maxbytes),
3666
+ loglevel=logging_level(loglevel),
3667
+ pidfile=existing_dirpath(pidfile),
3668
+ child_logdir=child_logdir if child_logdir else tempfile.gettempdir(),
3669
+ **kwargs,
3670
+ )
3671
+
2085
3672
 
2086
- def after_daemonize(self) -> None:
2087
- self._kqueue = select.kqueue()
2088
- for fd in self._readables:
2089
- self.register_readable(fd)
2090
- for fd in self._writables:
2091
- self.register_writable(fd)
3673
+ ##
2092
3674
 
2093
- def close(self) -> None:
2094
- self._kqueue.close() # type: ignore
2095
- self._kqueue = None
2096
3675
 
2097
- else:
2098
- KqueuePoller = None
3676
+ def prepare_process_group_config(dct: ConfigMapping) -> ConfigMapping:
3677
+ out = dict(dct)
3678
+ out['processes'] = build_config_named_children(out.get('processes'))
3679
+ return out
2099
3680
 
2100
3681
 
2101
- Poller: ta.Type[BasePoller]
2102
- if (
2103
- sys.platform == 'darwin' or sys.platform.startswith('freebsd') and
2104
- hasattr(select, 'kqueue') and KqueuePoller is not None
2105
- ):
2106
- Poller = KqueuePoller
2107
- elif hasattr(select, 'poll'):
2108
- Poller = PollPoller
2109
- else:
2110
- Poller = SelectPoller
3682
+ def prepare_server_config(dct: ta.Mapping[str, ta.Any]) -> ta.Mapping[str, ta.Any]:
3683
+ out = dict(dct)
3684
+ group_dcts = build_config_named_children(out.get('groups'))
3685
+ out['groups'] = [prepare_process_group_config(group_dct) for group_dct in group_dcts or []]
3686
+ return out
2111
3687
 
2112
3688
 
2113
3689
  ########################################
@@ -2134,6 +3710,11 @@ class AbstractServerContext(abc.ABC):
2134
3710
  def pid_history(self) -> ta.Dict[int, 'AbstractSubprocess']:
2135
3711
  raise NotImplementedError
2136
3712
 
3713
+ @property
3714
+ @abc.abstractmethod
3715
+ def inherited_fds(self) -> ta.FrozenSet[int]:
3716
+ raise NotImplementedError
3717
+
2137
3718
 
2138
3719
  class AbstractSubprocess(abc.ABC):
2139
3720
  @property
@@ -2165,12 +3746,14 @@ class ServerContext(AbstractServerContext):
2165
3746
  self,
2166
3747
  config: ServerConfig,
2167
3748
  *,
2168
- epoch: int = 0,
3749
+ epoch: ServerEpoch = ServerEpoch(0),
3750
+ inherited_fds: ta.Optional[InheritedFds] = None,
2169
3751
  ) -> None:
2170
3752
  super().__init__()
2171
3753
 
2172
3754
  self._config = config
2173
3755
  self._epoch = epoch
3756
+ self._inherited_fds = InheritedFds(frozenset(inherited_fds or []))
2174
3757
 
2175
3758
  self._pid_history: ta.Dict[int, AbstractSubprocess] = {}
2176
3759
  self._state: SupervisorState = SupervisorStates.RUNNING
@@ -2194,7 +3777,7 @@ class ServerContext(AbstractServerContext):
2194
3777
  return self._config
2195
3778
 
2196
3779
  @property
2197
- def epoch(self) -> int:
3780
+ def epoch(self) -> ServerEpoch:
2198
3781
  return self._epoch
2199
3782
 
2200
3783
  @property
@@ -2224,6 +3807,10 @@ class ServerContext(AbstractServerContext):
2224
3807
  def gid(self) -> ta.Optional[int]:
2225
3808
  return self._gid
2226
3809
 
3810
+ @property
3811
+ def inherited_fds(self) -> InheritedFds:
3812
+ return self._inherited_fds
3813
+
2227
3814
  ##
2228
3815
 
2229
3816
  def set_signals(self) -> None:
@@ -2752,10 +4339,10 @@ class OutputDispatcher(Dispatcher):
2752
4339
 
2753
4340
  if self._channel == 'stdout':
2754
4341
  if self._stdout_events_enabled:
2755
- notify_event(ProcessLogStdoutEvent(self._process, self._process.pid, data))
4342
+ EVENT_CALLBACKS.notify(ProcessLogStdoutEvent(self._process, self._process.pid, data))
2756
4343
 
2757
4344
  elif self._stderr_events_enabled:
2758
- notify_event(ProcessLogStderrEvent(self._process, self._process.pid, data))
4345
+ EVENT_CALLBACKS.notify(ProcessLogStderrEvent(self._process, self._process.pid, data))
2759
4346
 
2760
4347
  def record_output(self):
2761
4348
  if self._capture_log is None:
@@ -2806,7 +4393,7 @@ class OutputDispatcher(Dispatcher):
2806
4393
  channel = self._channel
2807
4394
  procname = self._process.config.name
2808
4395
  event = self.event_type(self._process, self._process.pid, data)
2809
- notify_event(event)
4396
+ EVENT_CALLBACKS.notify(event)
2810
4397
 
2811
4398
  log.debug('%r %s emitted a comm event', procname, channel)
2812
4399
  for handler in self._capture_log.handlers:
@@ -2867,6 +4454,9 @@ class InputDispatcher(Dispatcher):
2867
4454
  # ../process.py
2868
4455
 
2869
4456
 
4457
+ ##
4458
+
4459
+
2870
4460
  @functools.total_ordering
2871
4461
  class Subprocess(AbstractSubprocess):
2872
4462
  """A class to manage a subprocess."""
@@ -2892,7 +4482,12 @@ class Subprocess(AbstractSubprocess):
2892
4482
  spawn_err = None # error message attached by spawn() if any
2893
4483
  group = None # ProcessGroup instance if process is in the group
2894
4484
 
2895
- def __init__(self, config: ProcessConfig, group: 'ProcessGroup', context: AbstractServerContext) -> None:
4485
+ def __init__(
4486
+ self,
4487
+ config: ProcessConfig,
4488
+ group: 'ProcessGroup',
4489
+ context: AbstractServerContext,
4490
+ ) -> None:
2896
4491
  super().__init__()
2897
4492
  self._config = config
2898
4493
  self.group = group
@@ -3019,7 +4614,7 @@ class Subprocess(AbstractSubprocess):
3019
4614
  event_class = self.event_map.get(new_state)
3020
4615
  if event_class is not None:
3021
4616
  event = event_class(self, old_state, expected)
3022
- notify_event(event)
4617
+ EVENT_CALLBACKS.notify(event)
3023
4618
 
3024
4619
  return True
3025
4620
 
@@ -3136,7 +4731,10 @@ class Subprocess(AbstractSubprocess):
3136
4731
  os.dup2(self._pipes['child_stdout'], 2)
3137
4732
  else:
3138
4733
  os.dup2(self._pipes['child_stderr'], 2)
4734
+
3139
4735
  for i in range(3, self.context.config.minfds):
4736
+ if i in self.context.inherited_fds:
4737
+ continue
3140
4738
  close_fd(i)
3141
4739
 
3142
4740
  def _spawn_as_child(self, filename: str, argv: ta.Sequence[str]) -> None:
@@ -3171,7 +4769,7 @@ class Subprocess(AbstractSubprocess):
3171
4769
  cwd = self.config.directory
3172
4770
  try:
3173
4771
  if cwd is not None:
3174
- os.chdir(cwd)
4772
+ os.chdir(os.path.expanduser(cwd))
3175
4773
  except OSError as why:
3176
4774
  code = errno.errorcode.get(why.args[0], why.args[0])
3177
4775
  msg = f"couldn't chdir to {cwd}: {code}\n"
@@ -3227,7 +4825,7 @@ class Subprocess(AbstractSubprocess):
3227
4825
  return self.kill(self.config.stopsignal)
3228
4826
 
3229
4827
  def stop_report(self) -> None:
3230
- """ Log a 'waiting for x to stop' message with throttling. """
4828
+ """Log a 'waiting for x to stop' message with throttling."""
3231
4829
  if self.state == ProcessStates.STOPPING:
3232
4830
  now = time.time()
3233
4831
 
@@ -3357,7 +4955,7 @@ class Subprocess(AbstractSubprocess):
3357
4955
  return None
3358
4956
 
3359
4957
  def finish(self, sts: int) -> None:
3360
- """ The process was reaped and we need to report and manage its state """
4958
+ """The process was reaped and we need to report and manage its state."""
3361
4959
 
3362
4960
  self.drain()
3363
4961
 
@@ -3438,7 +5036,7 @@ class Subprocess(AbstractSubprocess):
3438
5036
  # system that this event was rejected so it can be processed again.
3439
5037
  if self.event is not None:
3440
5038
  # Note: this should only be true if we were in the BUSY state when finish() was called.
3441
- notify_event(EventRejectedEvent(self, self.event)) # type: ignore
5039
+ EVENT_CALLBACKS.notify(EventRejectedEvent(self, self.event)) # type: ignore
3442
5040
  self.event = None
3443
5041
 
3444
5042
  def set_uid(self) -> ta.Optional[str]:
@@ -3530,15 +5128,39 @@ class Subprocess(AbstractSubprocess):
3530
5128
  pass
3531
5129
 
3532
5130
 
5131
+ ##
5132
+
5133
+
5134
+ @dc.dataclass(frozen=True)
5135
+ class SubprocessFactory:
5136
+ fn: ta.Callable[[ProcessConfig, 'ProcessGroup'], Subprocess]
5137
+
5138
+ def __call__(self, config: ProcessConfig, group: 'ProcessGroup') -> Subprocess:
5139
+ return self.fn(config, group)
5140
+
5141
+
3533
5142
  @functools.total_ordering
3534
5143
  class ProcessGroup:
3535
- def __init__(self, config: ProcessGroupConfig, context: ServerContext):
5144
+ def __init__(
5145
+ self,
5146
+ config: ProcessGroupConfig,
5147
+ context: ServerContext,
5148
+ *,
5149
+ subprocess_factory: ta.Optional[SubprocessFactory] = None,
5150
+ ):
3536
5151
  super().__init__()
3537
5152
  self.config = config
3538
5153
  self.context = context
5154
+
5155
+ if subprocess_factory is None:
5156
+ def make_subprocess(config: ProcessConfig, group: ProcessGroup) -> Subprocess:
5157
+ return Subprocess(config, group, self.context)
5158
+ subprocess_factory = SubprocessFactory(make_subprocess)
5159
+ self._subprocess_factory = subprocess_factory
5160
+
3539
5161
  self.processes = {}
3540
5162
  for pconfig in self.config.processes or []:
3541
- process = Subprocess(pconfig, self, self.context)
5163
+ process = self._subprocess_factory(pconfig, self)
3542
5164
  self.processes[pconfig.name] = process
3543
5165
 
3544
5166
  def __lt__(self, other):
@@ -3604,22 +5226,44 @@ class ProcessGroup:
3604
5226
  # ../supervisor.py
3605
5227
 
3606
5228
 
3607
- def timeslice(period, when):
5229
+ def timeslice(period: int, when: float) -> int:
3608
5230
  return int(when - (when % period))
3609
5231
 
3610
5232
 
5233
+ @dc.dataclass(frozen=True)
5234
+ class ProcessGroupFactory:
5235
+ fn: ta.Callable[[ProcessGroupConfig], ProcessGroup]
5236
+
5237
+ def __call__(self, config: ProcessGroupConfig) -> ProcessGroup:
5238
+ return self.fn(config)
5239
+
5240
+
3611
5241
  class Supervisor:
3612
5242
 
3613
- def __init__(self, context: ServerContext) -> None:
5243
+ def __init__(
5244
+ self,
5245
+ context: ServerContext,
5246
+ *,
5247
+ process_group_factory: ta.Optional[ProcessGroupFactory] = None,
5248
+ ) -> None:
3614
5249
  super().__init__()
3615
5250
 
3616
5251
  self._context = context
5252
+
5253
+ if process_group_factory is None:
5254
+ def make_process_group(config: ProcessGroupConfig) -> ProcessGroup:
5255
+ return ProcessGroup(config, self._context)
5256
+ process_group_factory = ProcessGroupFactory(make_process_group)
5257
+ self._process_group_factory = process_group_factory
5258
+
3617
5259
  self._ticks: ta.Dict[int, float] = {}
3618
5260
  self._process_groups: ta.Dict[str, ProcessGroup] = {} # map of process group name to process group object
3619
5261
  self._stop_groups: ta.Optional[ta.List[ProcessGroup]] = None # list used for priority ordered shutdown
3620
5262
  self._stopping = False # set after we detect that we are handling a stop request
3621
5263
  self._last_shutdown_report = 0. # throttle for delayed process error reports at stop
3622
5264
 
5265
+ #
5266
+
3623
5267
  @property
3624
5268
  def context(self) -> ServerContext:
3625
5269
  return self._context
@@ -3627,58 +5271,7 @@ class Supervisor:
3627
5271
  def get_state(self) -> SupervisorState:
3628
5272
  return self._context.state
3629
5273
 
3630
- def main(self) -> None:
3631
- self.setup()
3632
- self.run()
3633
-
3634
- @cached_nullary
3635
- def setup(self) -> None:
3636
- if not self._context.first:
3637
- # prevent crash on libdispatch-based systems, at least for the first request
3638
- self._context.cleanup_fds()
3639
-
3640
- self._context.set_uid_or_exit()
3641
-
3642
- if self._context.first:
3643
- self._context.set_rlimits_or_exit()
3644
-
3645
- # this sets the options.logger object delay logger instantiation until after setuid
3646
- if not self._context.config.nocleanup:
3647
- # clean up old automatic logs
3648
- self._context.clear_auto_child_logdir()
3649
-
3650
- def run(
3651
- self,
3652
- *,
3653
- callback: ta.Optional[ta.Callable[['Supervisor'], bool]] = None,
3654
- ) -> None:
3655
- self._process_groups = {} # clear
3656
- self._stop_groups = None # clear
3657
-
3658
- clear_events()
3659
-
3660
- try:
3661
- for config in self._context.config.groups or []:
3662
- self.add_process_group(config)
3663
-
3664
- self._context.set_signals()
3665
-
3666
- if not self._context.config.nodaemon and self._context.first:
3667
- self._context.daemonize()
3668
-
3669
- # writing pid file needs to come *after* daemonizing or pid will be wrong
3670
- self._context.write_pidfile()
3671
-
3672
- notify_event(SupervisorRunningEvent())
3673
-
3674
- while True:
3675
- if callback is not None and not callback(self):
3676
- break
3677
-
3678
- self._run_once()
3679
-
3680
- finally:
3681
- self._context.cleanup()
5274
+ #
3682
5275
 
3683
5276
  class DiffToActive(ta.NamedTuple):
3684
5277
  added: ta.List[ProcessGroupConfig]
@@ -3704,10 +5297,10 @@ class Supervisor:
3704
5297
  if name in self._process_groups:
3705
5298
  return False
3706
5299
 
3707
- group = self._process_groups[name] = ProcessGroup(config, self._context)
5300
+ group = self._process_groups[name] = self._process_group_factory(config)
3708
5301
  group.after_setuid()
3709
5302
 
3710
- notify_event(ProcessGroupAddedEvent(name))
5303
+ EVENT_CALLBACKS.notify(ProcessGroupAddedEvent(name))
3711
5304
  return True
3712
5305
 
3713
5306
  def remove_process_group(self, name: str) -> bool:
@@ -3718,7 +5311,7 @@ class Supervisor:
3718
5311
 
3719
5312
  del self._process_groups[name]
3720
5313
 
3721
- notify_event(ProcessGroupRemovedEvent(name))
5314
+ EVENT_CALLBACKS.notify(ProcessGroupRemovedEvent(name))
3722
5315
  return True
3723
5316
 
3724
5317
  def get_process_map(self) -> ta.Dict[int, Dispatcher]:
@@ -3747,6 +5340,72 @@ class Supervisor:
3747
5340
 
3748
5341
  return unstopped
3749
5342
 
5343
+ #
5344
+
5345
+ def main(self) -> None:
5346
+ self.setup()
5347
+ self.run()
5348
+
5349
+ @cached_nullary
5350
+ def setup(self) -> None:
5351
+ if not self._context.first:
5352
+ # prevent crash on libdispatch-based systems, at least for the first request
5353
+ self._context.cleanup_fds()
5354
+
5355
+ self._context.set_uid_or_exit()
5356
+
5357
+ if self._context.first:
5358
+ self._context.set_rlimits_or_exit()
5359
+
5360
+ # this sets the options.logger object delay logger instantiation until after setuid
5361
+ if not self._context.config.nocleanup:
5362
+ # clean up old automatic logs
5363
+ self._context.clear_auto_child_logdir()
5364
+
5365
+ def run(
5366
+ self,
5367
+ *,
5368
+ callback: ta.Optional[ta.Callable[['Supervisor'], bool]] = None,
5369
+ ) -> None:
5370
+ self._process_groups = {} # clear
5371
+ self._stop_groups = None # clear
5372
+
5373
+ EVENT_CALLBACKS.clear()
5374
+
5375
+ try:
5376
+ for config in self._context.config.groups or []:
5377
+ self.add_process_group(config)
5378
+
5379
+ self._context.set_signals()
5380
+
5381
+ if not self._context.config.nodaemon and self._context.first:
5382
+ self._context.daemonize()
5383
+
5384
+ # writing pid file needs to come *after* daemonizing or pid will be wrong
5385
+ self._context.write_pidfile()
5386
+
5387
+ EVENT_CALLBACKS.notify(SupervisorRunningEvent())
5388
+
5389
+ while True:
5390
+ if callback is not None and not callback(self):
5391
+ break
5392
+
5393
+ self._run_once()
5394
+
5395
+ finally:
5396
+ self._context.cleanup()
5397
+
5398
+ #
5399
+
5400
+ def _run_once(self) -> None:
5401
+ self._poll()
5402
+ self._reap()
5403
+ self._handle_signal()
5404
+ self._tick()
5405
+
5406
+ if self._context.state < SupervisorStates.RUNNING:
5407
+ self._ordered_stop_groups_phase_2()
5408
+
3750
5409
  def _ordered_stop_groups_phase_1(self) -> None:
3751
5410
  if self._stop_groups:
3752
5411
  # stop the last group (the one with the "highest" priority)
@@ -3763,7 +5422,7 @@ class Supervisor:
3763
5422
  # down, so push it back on to the end of the stop group queue
3764
5423
  self._stop_groups.append(group)
3765
5424
 
3766
- def _run_once(self) -> None:
5425
+ def _poll(self) -> None:
3767
5426
  combined_map = {}
3768
5427
  combined_map.update(self.get_process_map())
3769
5428
 
@@ -3775,7 +5434,7 @@ class Supervisor:
3775
5434
  # first time, set the stopping flag, do a notification and set stop_groups
3776
5435
  self._stopping = True
3777
5436
  self._stop_groups = pgroups[:]
3778
- notify_event(SupervisorStoppingEvent())
5437
+ EVENT_CALLBACKS.notify(SupervisorStoppingEvent())
3779
5438
 
3780
5439
  self._ordered_stop_groups_phase_1()
3781
5440
 
@@ -3835,33 +5494,6 @@ class Supervisor:
3835
5494
  for group in pgroups:
3836
5495
  group.transition()
3837
5496
 
3838
- self._reap()
3839
- self._handle_signal()
3840
- self._tick()
3841
-
3842
- if self._context.state < SupervisorStates.RUNNING:
3843
- self._ordered_stop_groups_phase_2()
3844
-
3845
- def _tick(self, now: ta.Optional[float] = None) -> None:
3846
- """Send one or more 'tick' events when the timeslice related to the period for the event type rolls over"""
3847
-
3848
- if now is None:
3849
- # now won't be None in unit tests
3850
- now = time.time()
3851
-
3852
- for event in TICK_EVENTS:
3853
- period = event.period # type: ignore
3854
-
3855
- last_tick = self._ticks.get(period)
3856
- if last_tick is None:
3857
- # we just started up
3858
- last_tick = self._ticks[period] = timeslice(period, now)
3859
-
3860
- this_tick = timeslice(period, now)
3861
- if this_tick != last_tick:
3862
- self._ticks[period] = this_tick
3863
- notify_event(event(this_tick, self))
3864
-
3865
5497
  def _reap(self, *, once: bool = False, depth: int = 0) -> None:
3866
5498
  if depth >= 100:
3867
5499
  return
@@ -3910,11 +5542,78 @@ class Supervisor:
3910
5542
  else:
3911
5543
  log.debug('received %s indicating nothing', signame(sig))
3912
5544
 
5545
+ def _tick(self, now: ta.Optional[float] = None) -> None:
5546
+ """Send one or more 'tick' events when the timeslice related to the period for the event type rolls over"""
5547
+
5548
+ if now is None:
5549
+ # now won't be None in unit tests
5550
+ now = time.time()
5551
+
5552
+ for event in TICK_EVENTS:
5553
+ period = event.period # type: ignore
5554
+
5555
+ last_tick = self._ticks.get(period)
5556
+ if last_tick is None:
5557
+ # we just started up
5558
+ last_tick = self._ticks[period] = timeslice(period, now)
5559
+
5560
+ this_tick = timeslice(period, now)
5561
+ if this_tick != last_tick:
5562
+ self._ticks[period] = this_tick
5563
+ EVENT_CALLBACKS.notify(event(this_tick, self))
5564
+
3913
5565
 
3914
5566
  ########################################
3915
5567
  # main.py
3916
5568
 
3917
5569
 
5570
+ ##
5571
+
5572
+
5573
+ def build_server_bindings(
5574
+ config: ServerConfig,
5575
+ *,
5576
+ server_epoch: ta.Optional[ServerEpoch] = None,
5577
+ inherited_fds: ta.Optional[InheritedFds] = None,
5578
+ ) -> InjectorBindings:
5579
+ lst: ta.List[InjectorBindingOrBindings] = [
5580
+ inj.bind(config),
5581
+
5582
+ inj.bind(ServerContext, singleton=True),
5583
+ inj.bind(AbstractServerContext, to_key=ServerContext),
5584
+
5585
+ inj.bind(Supervisor, singleton=True),
5586
+ ]
5587
+
5588
+ #
5589
+
5590
+ def make_process_group_factory(injector: Injector) -> ProcessGroupFactory:
5591
+ def inner(group_config: ProcessGroupConfig) -> ProcessGroup:
5592
+ return injector.inject(functools.partial(ProcessGroup, group_config))
5593
+ return ProcessGroupFactory(inner)
5594
+ lst.append(inj.bind(make_process_group_factory))
5595
+
5596
+ def make_subprocess_factory(injector: Injector) -> SubprocessFactory:
5597
+ def inner(process_config: ProcessConfig, group: ProcessGroup) -> Subprocess:
5598
+ return injector.inject(functools.partial(Subprocess, process_config, group))
5599
+ return SubprocessFactory(inner)
5600
+ lst.append(inj.bind(make_subprocess_factory))
5601
+
5602
+ #
5603
+
5604
+ if server_epoch is not None:
5605
+ lst.append(inj.bind(server_epoch, key=ServerEpoch))
5606
+ if inherited_fds is not None:
5607
+ lst.append(inj.bind(inherited_fds, key=InheritedFds))
5608
+
5609
+ #
5610
+
5611
+ return inj.as_bindings(*lst)
5612
+
5613
+
5614
+ ##
5615
+
5616
+
3918
5617
  def main(
3919
5618
  argv: ta.Optional[ta.Sequence[str]] = None,
3920
5619
  *,
@@ -3925,6 +5624,7 @@ def main(
3925
5624
  parser = argparse.ArgumentParser()
3926
5625
  parser.add_argument('config_file', metavar='config-file')
3927
5626
  parser.add_argument('--no-journald', action='store_true')
5627
+ parser.add_argument('--inherit-initial-fds', action='store_true')
3928
5628
  args = parser.parse_args(argv)
3929
5629
 
3930
5630
  #
@@ -3940,20 +5640,27 @@ def main(
3940
5640
 
3941
5641
  #
3942
5642
 
5643
+ initial_fds: ta.Optional[InheritedFds] = None
5644
+ if args.inherit_initial_fds:
5645
+ initial_fds = InheritedFds(get_open_fds(0x10000))
5646
+
3943
5647
  # if we hup, restart by making a new Supervisor()
3944
5648
  for epoch in itertools.count():
3945
- with open(cf) as f:
3946
- config_src = f.read()
3947
-
3948
- config_dct = json.loads(config_src)
3949
- config: ServerConfig = unmarshal_obj(config_dct, ServerConfig)
5649
+ config = read_config_file(
5650
+ os.path.expanduser(cf),
5651
+ ServerConfig,
5652
+ prepare=prepare_server_config,
5653
+ )
3950
5654
 
3951
- context = ServerContext(
5655
+ injector = inj.create_injector(build_server_bindings(
3952
5656
  config,
3953
- epoch=epoch,
3954
- )
5657
+ server_epoch=ServerEpoch(epoch),
5658
+ inherited_fds=initial_fds,
5659
+ ))
5660
+
5661
+ context = injector.provide(ServerContext)
5662
+ supervisor = injector.provide(Supervisor)
3955
5663
 
3956
- supervisor = Supervisor(context)
3957
5664
  try:
3958
5665
  supervisor.main()
3959
5666
  except ExitNow: