ominfra-0.0.0.dev90-py3-none-any.whl → ominfra-0.0.0.dev92-py3-none-any.whl
- ominfra/clouds/aws/journald2aws/main.py +37 -20
- ominfra/clouds/aws/logs.py +2 -2
- ominfra/journald/__init__.py +0 -0
- ominfra/{clouds/aws/journald2aws/journald → journald}/messages.py +27 -15
- ominfra/journald/tailer.py +453 -0
- ominfra/scripts/journald2aws.py +549 -148
- ominfra/scripts/supervisor.py +484 -112
- ominfra/supervisor/compat.py +6 -2
- ominfra/supervisor/configs.py +54 -54
- ominfra/supervisor/context.py +2 -0
- ominfra/supervisor/datatypes.py +4 -0
- ominfra/supervisor/dispatchers.py +28 -16
- ominfra/supervisor/events.py +6 -7
- ominfra/supervisor/exceptions.py +7 -5
- ominfra/supervisor/process.py +14 -6
- ominfra/supervisor/supervisor.py +15 -29
- ominfra/{clouds/aws/journald2aws/threadworker.py → threadworker.py} +6 -3
- {ominfra-0.0.0.dev90.dist-info → ominfra-0.0.0.dev92.dist-info}/METADATA +4 -4
- {ominfra-0.0.0.dev90.dist-info → ominfra-0.0.0.dev92.dist-info}/RECORD +24 -24
- ominfra/clouds/aws/journald2aws/journald/__init__.py +0 -1
- ominfra/clouds/aws/journald2aws/journald/tailer.py +0 -108
- /ominfra/{clouds/aws/journald2aws/journald → journald}/genmessages.py +0 -0
- {ominfra-0.0.0.dev90.dist-info → ominfra-0.0.0.dev92.dist-info}/LICENSE +0 -0
- {ominfra-0.0.0.dev90.dist-info → ominfra-0.0.0.dev92.dist-info}/WHEEL +0 -0
- {ominfra-0.0.0.dev90.dist-info → ominfra-0.0.0.dev92.dist-info}/entry_points.txt +0 -0
- {ominfra-0.0.0.dev90.dist-info → ominfra-0.0.0.dev92.dist-info}/top_level.txt +0 -0
ominfra/scripts/supervisor.py
CHANGED
@@ -5,11 +5,16 @@
 # @omlish-amalg-output ../supervisor/supervisor.py
 # ruff: noqa: N802 UP006 UP007 UP036
 import abc
+import base64
+import collections.abc
 import contextlib
 import dataclasses as dc
 import datetime
+import decimal
+import enum
 import errno
 import fcntl
+import fractions
 import functools
 import grp
 import json
@@ -29,7 +34,9 @@ import time
 import traceback
 import types
 import typing as ta
+import uuid
 import warnings
+import weakref  # noqa


 ########################################
@@ -76,9 +83,10 @@ def compact_traceback() -> ta.Tuple[
     types.TracebackType,
 ]:
     t, v, tb = sys.exc_info()
-    tbinfo = []
     if not tb:
         raise RuntimeError('No traceback')
+
+    tbinfo = []
     while tb:
         tbinfo.append((
             tb.tb_frame.f_code.co_filename,
@@ -116,6 +124,7 @@ def decode_wait_status(sts: int) -> ta.Tuple[int, str]:
     Return a tuple (exitstatus, message) where exitstatus is the exit status, or -1 if the process was killed by a
     signal; and message is a message telling what happened. It is the caller's responsibility to display the message.
     """
+
     if os.WIFEXITED(sts):
         es = os.WEXITSTATUS(sts) & 0xffff
         msg = f'exit status {es}'
@@ -215,6 +224,7 @@ def real_exit(code: int) -> None:

 def get_path() -> ta.Sequence[str]:
     """Return a list corresponding to $PATH, or a default."""
+
     path = ['/bin', '/usr/bin', '/usr/local/bin']
     if 'PATH' in os.environ:
         p = os.environ['PATH']
@@ -231,8 +241,9 @@ ANSI_ESCAPE_BEGIN = b'\x1b['
 ANSI_TERMINATORS = (b'H', b'f', b'A', b'B', b'C', b'D', b'R', b's', b'u', b'J', b'K', b'h', b'l', b'p', b'm')


-def strip_escapes(s):
+def strip_escapes(s: bytes) -> bytes:
     """Remove all ANSI color escapes from the given string."""
+
     result = b''
     show = 1
     i = 0
@@ -400,15 +411,19 @@ SIGNUMS = [getattr(signal, k) for k in dir(signal) if k.startswith('SIG')]
 def signal_number(value: ta.Union[int, str]) -> int:
     try:
         num = int(value)
+
     except (ValueError, TypeError):
         name = value.strip().upper()  # type: ignore
         if not name.startswith('SIG'):
             name = f'SIG{name}'
+
         num = getattr(signal, name, None)  # type: ignore
         if num is None:
             raise ValueError(f'value {value!r} is not a valid signal name')  # noqa
+
     if num not in SIGNUMS:
         raise ValueError(f'value {value!r} is not a valid signal number')
+
     return num


@@ -425,20 +440,22 @@ class RestartUnconditionally:


 class ProcessError(Exception):
-    """
+    """Specialized exceptions used when attempting to start a process."""


 class BadCommandError(ProcessError):
-    """
+    """Indicates the command could not be parsed properly."""


 class NotExecutableError(ProcessError):
-    """
-    resolves to a file which is not executable, or which
+    """
+    Indicates that the filespec cannot be executed because its path resolves to a file which is not executable, or which
+    is a directory.
+    """


 class NotFoundError(ProcessError):
-    """
+    """Indicates that the filespec cannot be executed because it could not be found."""


 class NoPermissionError(ProcessError):
@@ -773,61 +790,54 @@ json_dumps_compact: ta.Callable[..., str] = functools.partial(json.dumps, **JSON


 ########################################
-#
+# ../../../omlish/lite/reflect.py


-
-
-
-
-    umask: int = 0o22
-    directory: ta.Optional[str] = None
-    logfile: str = 'supervisord.log'
-    logfile_maxbytes: int = 50 * 1024 * 1024
-    logfile_backups: int = 10
-    loglevel: int = logging.INFO
-    pidfile: str = 'supervisord.pid'
-    identifier: str = 'supervisor'
-    child_logdir: str = '/dev/null'
-    minfds: int = 1024
-    minprocs: int = 200
-    nocleanup: bool = False
-    strip_ansi: bool = False
-    silent: bool = False
+_GENERIC_ALIAS_TYPES = (
+    ta._GenericAlias,  # type: ignore  # noqa
+    *([ta._SpecialGenericAlias] if hasattr(ta, '_SpecialGenericAlias') else []),  # noqa
+)

-    groups: ta.Optional[ta.Sequence['ProcessGroupConfig']] = None

-
-
-
-
-        logfile: str = 'supervisord.log',
-        logfile_maxbytes: ta.Union[int, str] = 50 * 1024 * 1024,
-        loglevel: ta.Union[int, str] = logging.INFO,
-        pidfile: str = 'supervisord.pid',
-        child_logdir: ta.Optional[str] = None,
-        **kwargs: ta.Any,
-    ) -> 'ServerConfig':
-        return cls(
-            umask=octal_type(umask),
-            directory=existing_directory(directory) if directory is not None else None,
-            logfile=existing_dirpath(logfile),
-            logfile_maxbytes=byte_size(logfile_maxbytes),
-            loglevel=logging_level(loglevel),
-            pidfile=existing_dirpath(pidfile),
-            child_logdir=child_logdir if child_logdir else tempfile.gettempdir(),
-            **kwargs,
-        )
+def is_generic_alias(obj, *, origin: ta.Any = None) -> bool:
+    return (
+        isinstance(obj, _GENERIC_ALIAS_TYPES) and
+        (origin is None or ta.get_origin(obj) is origin)
+    )


-
-
-    name: str
+is_union_alias = functools.partial(is_generic_alias, origin=ta.Union)
+is_callable_alias = functools.partial(is_generic_alias, origin=ta.Callable)

-    priority: int = 999

-
+def is_optional_alias(spec: ta.Any) -> bool:
+    return (
+        isinstance(spec, _GENERIC_ALIAS_TYPES) and  # noqa
+        ta.get_origin(spec) is ta.Union and
+        len(ta.get_args(spec)) == 2 and
+        any(a in (None, type(None)) for a in ta.get_args(spec))
+    )
+
+
+def get_optional_alias_arg(spec: ta.Any) -> ta.Any:
+    [it] = [it for it in ta.get_args(spec) if it not in (None, type(None))]
+    return it
+
+
+def deep_subclasses(cls: ta.Type[T]) -> ta.Iterator[ta.Type[T]]:
+    seen = set()
+    todo = list(reversed(cls.__subclasses__()))
+    while todo:
+        cur = todo.pop()
+        if cur in seen:
+            continue
+        seen.add(cur)
+        yield cur
+        todo.extend(reversed(cur.__subclasses__()))
+
+
+########################################
+# ../configs.py


 @dc.dataclass(frozen=True)
@@ -867,13 +877,67 @@ class ProcessConfig:

     killasgroup: bool = False

-    exitcodes: ta.
+    exitcodes: ta.Sequence[int] = (0,)

     redirect_stderr: bool = False

     environment: ta.Optional[ta.Mapping[str, str]] = None


+@dc.dataclass(frozen=True)
+class ProcessGroupConfig:
+    name: str
+
+    priority: int = 999
+
+    processes: ta.Optional[ta.Sequence[ProcessConfig]] = None
+
+
+@dc.dataclass(frozen=True)
+class ServerConfig:
+    user: ta.Optional[str] = None
+    nodaemon: bool = False
+    umask: int = 0o22
+    directory: ta.Optional[str] = None
+    logfile: str = 'supervisord.log'
+    logfile_maxbytes: int = 50 * 1024 * 1024
+    logfile_backups: int = 10
+    loglevel: int = logging.INFO
+    pidfile: str = 'supervisord.pid'
+    identifier: str = 'supervisor'
+    child_logdir: str = '/dev/null'
+    minfds: int = 1024
+    minprocs: int = 200
+    nocleanup: bool = False
+    strip_ansi: bool = False
+    silent: bool = False
+
+    groups: ta.Optional[ta.Sequence[ProcessGroupConfig]] = None
+
+    @classmethod
+    def new(
+            cls,
+            umask: ta.Union[int, str] = 0o22,
+            directory: ta.Optional[str] = None,
+            logfile: str = 'supervisord.log',
+            logfile_maxbytes: ta.Union[int, str] = 50 * 1024 * 1024,
+            loglevel: ta.Union[int, str] = logging.INFO,
+            pidfile: str = 'supervisord.pid',
+            child_logdir: ta.Optional[str] = None,
+            **kwargs: ta.Any,
+    ) -> 'ServerConfig':
+        return cls(
+            umask=octal_type(umask),
+            directory=existing_directory(directory) if directory is not None else None,
+            logfile=existing_dirpath(logfile),
+            logfile_maxbytes=byte_size(logfile_maxbytes),
+            loglevel=logging_level(loglevel),
+            pidfile=existing_dirpath(pidfile),
+            child_logdir=child_logdir if child_logdir else tempfile.gettempdir(),
+            **kwargs,
+        )
+
+
 ########################################
 # ../states.py

@@ -1214,6 +1278,309 @@ def configure_standard_logging(
     return StandardLogHandler(handler)


+########################################
+# ../../../omlish/lite/marshal.py
+"""
+TODO:
+ - pickle stdlib objs? have to pin to 3.8 pickle protocol, will be cross-version
+ - nonstrict toggle
+"""
+
+
+##
+
+
+class ObjMarshaler(abc.ABC):
+    @abc.abstractmethod
+    def marshal(self, o: ta.Any) -> ta.Any:
+        raise NotImplementedError
+
+    @abc.abstractmethod
+    def unmarshal(self, o: ta.Any) -> ta.Any:
+        raise NotImplementedError
+
+
+class NopObjMarshaler(ObjMarshaler):
+    def marshal(self, o: ta.Any) -> ta.Any:
+        return o
+
+    def unmarshal(self, o: ta.Any) -> ta.Any:
+        return o
+
+
+@dc.dataclass()
+class ProxyObjMarshaler(ObjMarshaler):
+    m: ta.Optional[ObjMarshaler] = None
+
+    def marshal(self, o: ta.Any) -> ta.Any:
+        return check_not_none(self.m).marshal(o)
+
+    def unmarshal(self, o: ta.Any) -> ta.Any:
+        return check_not_none(self.m).unmarshal(o)
+
+
+@dc.dataclass(frozen=True)
+class CastObjMarshaler(ObjMarshaler):
+    ty: type
+
+    def marshal(self, o: ta.Any) -> ta.Any:
+        return o
+
+    def unmarshal(self, o: ta.Any) -> ta.Any:
+        return self.ty(o)
+
+
+class DynamicObjMarshaler(ObjMarshaler):
+    def marshal(self, o: ta.Any) -> ta.Any:
+        return marshal_obj(o)
+
+    def unmarshal(self, o: ta.Any) -> ta.Any:
+        return o
+
+
+@dc.dataclass(frozen=True)
+class Base64ObjMarshaler(ObjMarshaler):
+    ty: type
+
+    def marshal(self, o: ta.Any) -> ta.Any:
+        return base64.b64encode(o).decode('ascii')
+
+    def unmarshal(self, o: ta.Any) -> ta.Any:
+        return self.ty(base64.b64decode(o))
+
+
+@dc.dataclass(frozen=True)
+class EnumObjMarshaler(ObjMarshaler):
+    ty: type
+
+    def marshal(self, o: ta.Any) -> ta.Any:
+        return o.name
+
+    def unmarshal(self, o: ta.Any) -> ta.Any:
+        return self.ty.__members__[o]  # type: ignore
+
+
+@dc.dataclass(frozen=True)
+class OptionalObjMarshaler(ObjMarshaler):
+    item: ObjMarshaler
+
+    def marshal(self, o: ta.Any) -> ta.Any:
+        if o is None:
+            return None
+        return self.item.marshal(o)
+
+    def unmarshal(self, o: ta.Any) -> ta.Any:
+        if o is None:
+            return None
+        return self.item.unmarshal(o)
+
+
+@dc.dataclass(frozen=True)
+class MappingObjMarshaler(ObjMarshaler):
+    ty: type
+    km: ObjMarshaler
+    vm: ObjMarshaler
+
+    def marshal(self, o: ta.Any) -> ta.Any:
+        return {self.km.marshal(k): self.vm.marshal(v) for k, v in o.items()}
+
+    def unmarshal(self, o: ta.Any) -> ta.Any:
+        return self.ty((self.km.unmarshal(k), self.vm.unmarshal(v)) for k, v in o.items())
+
+
+@dc.dataclass(frozen=True)
+class IterableObjMarshaler(ObjMarshaler):
+    ty: type
+    item: ObjMarshaler
+
+    def marshal(self, o: ta.Any) -> ta.Any:
+        return [self.item.marshal(e) for e in o]
+
+    def unmarshal(self, o: ta.Any) -> ta.Any:
+        return self.ty(self.item.unmarshal(e) for e in o)
+
+
+@dc.dataclass(frozen=True)
+class DataclassObjMarshaler(ObjMarshaler):
+    ty: type
+    fs: ta.Mapping[str, ObjMarshaler]
+    nonstrict: bool = False
+
+    def marshal(self, o: ta.Any) -> ta.Any:
+        return {k: m.marshal(getattr(o, k)) for k, m in self.fs.items()}
+
+    def unmarshal(self, o: ta.Any) -> ta.Any:
+        return self.ty(**{k: self.fs[k].unmarshal(v) for k, v in o.items() if self.nonstrict or k in self.fs})
+
+
+@dc.dataclass(frozen=True)
+class PolymorphicObjMarshaler(ObjMarshaler):
+    class Impl(ta.NamedTuple):
+        ty: type
+        tag: str
+        m: ObjMarshaler
+
+    impls_by_ty: ta.Mapping[type, Impl]
+    impls_by_tag: ta.Mapping[str, Impl]
+
+    def marshal(self, o: ta.Any) -> ta.Any:
+        impl = self.impls_by_ty[type(o)]
+        return {impl.tag: impl.m.marshal(o)}
+
+    def unmarshal(self, o: ta.Any) -> ta.Any:
+        [(t, v)] = o.items()
+        impl = self.impls_by_tag[t]
+        return impl.m.unmarshal(v)
+
+
+@dc.dataclass(frozen=True)
+class DatetimeObjMarshaler(ObjMarshaler):
+    ty: type
+
+    def marshal(self, o: ta.Any) -> ta.Any:
+        return o.isoformat()
+
+    def unmarshal(self, o: ta.Any) -> ta.Any:
+        return self.ty.fromisoformat(o)  # type: ignore
+
+
+class DecimalObjMarshaler(ObjMarshaler):
+    def marshal(self, o: ta.Any) -> ta.Any:
+        return str(check_isinstance(o, decimal.Decimal))
+
+    def unmarshal(self, v: ta.Any) -> ta.Any:
+        return decimal.Decimal(check_isinstance(v, str))
+
+
+class FractionObjMarshaler(ObjMarshaler):
+    def marshal(self, o: ta.Any) -> ta.Any:
+        fr = check_isinstance(o, fractions.Fraction)
+        return [fr.numerator, fr.denominator]
+
+    def unmarshal(self, v: ta.Any) -> ta.Any:
+        num, denom = check_isinstance(v, list)
+        return fractions.Fraction(num, denom)
+
+
+class UuidObjMarshaler(ObjMarshaler):
+    def marshal(self, o: ta.Any) -> ta.Any:
+        return str(o)
+
+    def unmarshal(self, o: ta.Any) -> ta.Any:
+        return uuid.UUID(o)
+
+
+_OBJ_MARSHALERS: ta.Dict[ta.Any, ObjMarshaler] = {
+    **{t: NopObjMarshaler() for t in (type(None),)},
+    **{t: CastObjMarshaler(t) for t in (int, float, str, bool)},
+    **{t: Base64ObjMarshaler(t) for t in (bytes, bytearray)},
+    **{t: IterableObjMarshaler(t, DynamicObjMarshaler()) for t in (list, tuple, set, frozenset)},
+    **{t: MappingObjMarshaler(t, DynamicObjMarshaler(), DynamicObjMarshaler()) for t in (dict,)},
+
+    ta.Any: DynamicObjMarshaler(),
+
+    **{t: DatetimeObjMarshaler(t) for t in (datetime.date, datetime.time, datetime.datetime)},
+    decimal.Decimal: DecimalObjMarshaler(),
+    fractions.Fraction: FractionObjMarshaler(),
+    uuid.UUID: UuidObjMarshaler(),
+}
+
+_OBJ_MARSHALER_GENERIC_MAPPING_TYPES: ta.Dict[ta.Any, type] = {
+    **{t: t for t in (dict,)},
+    **{t: dict for t in (collections.abc.Mapping, collections.abc.MutableMapping)},
+}
+
+_OBJ_MARSHALER_GENERIC_ITERABLE_TYPES: ta.Dict[ta.Any, type] = {
+    **{t: t for t in (list, tuple, set, frozenset)},
+    collections.abc.Set: frozenset,
+    collections.abc.MutableSet: set,
+    collections.abc.Sequence: tuple,
+    collections.abc.MutableSequence: list,
+}
+
+
+def register_opj_marshaler(ty: ta.Any, m: ObjMarshaler) -> None:
+    if ty in _OBJ_MARSHALERS:
+        raise KeyError(ty)
+    _OBJ_MARSHALERS[ty] = m
+
+
+def _make_obj_marshaler(ty: ta.Any) -> ObjMarshaler:
+    if isinstance(ty, type):
+        if abc.ABC in ty.__bases__:
+            impls = [  # type: ignore
+                PolymorphicObjMarshaler.Impl(
+                    ity,
+                    ity.__qualname__,
+                    get_obj_marshaler(ity),
+                )
+                for ity in deep_subclasses(ty)
+                if abc.ABC not in ity.__bases__
+            ]
+            return PolymorphicObjMarshaler(
+                {i.ty: i for i in impls},
+                {i.tag: i for i in impls},
+            )
+
+        if issubclass(ty, enum.Enum):
+            return EnumObjMarshaler(ty)
+
+        if dc.is_dataclass(ty):
+            return DataclassObjMarshaler(
+                ty,
+                {f.name: get_obj_marshaler(f.type) for f in dc.fields(ty)},
+            )
+
+    if is_generic_alias(ty):
+        try:
+            mt = _OBJ_MARSHALER_GENERIC_MAPPING_TYPES[ta.get_origin(ty)]
+        except KeyError:
+            pass
+        else:
+            k, v = ta.get_args(ty)
+            return MappingObjMarshaler(mt, get_obj_marshaler(k), get_obj_marshaler(v))
+
+        try:
+            st = _OBJ_MARSHALER_GENERIC_ITERABLE_TYPES[ta.get_origin(ty)]
+        except KeyError:
+            pass
+        else:
+            [e] = ta.get_args(ty)
+            return IterableObjMarshaler(st, get_obj_marshaler(e))
+
+    if is_union_alias(ty):
+        return OptionalObjMarshaler(get_obj_marshaler(get_optional_alias_arg(ty)))
+
+    raise TypeError(ty)
+
+
+def get_obj_marshaler(ty: ta.Any) -> ObjMarshaler:
+    try:
+        return _OBJ_MARSHALERS[ty]
+    except KeyError:
+        pass
+
+    p = ProxyObjMarshaler()
+    _OBJ_MARSHALERS[ty] = p
+    try:
+        m = _make_obj_marshaler(ty)
+    except Exception:
+        del _OBJ_MARSHALERS[ty]
+        raise
+    else:
+        p.m = m
+        _OBJ_MARSHALERS[ty] = m
+        return m
+
+
+def marshal_obj(o: ta.Any, ty: ta.Any = None) -> ta.Any:
+    return get_obj_marshaler(ty if ty is not None else type(o)).marshal(o)
+
+
+def unmarshal_obj(o: ta.Any, ty: ta.Union[ta.Type[T], ta.Any]) -> T:
+    return get_obj_marshaler(ty).unmarshal(o)
+
+
 ########################################
 # ../events.py

@@ -1245,12 +1612,11 @@ notify_event = EVENT_CALLBACKS.notify
 clear_events = EVENT_CALLBACKS.clear


-class Event:
+class Event(abc.ABC):  # noqa
     """Abstract event type """


-class ProcessLogEvent(Event):
-    """Abstract"""
+class ProcessLogEvent(Event, abc.ABC):
     channel: ta.Optional[str] = None

     def __init__(self, process, pid, data):
@@ -1267,8 +1633,8 @@ class ProcessLogEvent(Event):
             data = as_string(self.data)
         except UnicodeDecodeError:
             data = f'Undecodable: {self.data!r}'
-
-        result =
+
+        result = 'processname:%s groupname:%s pid:%s channel:%s\n%s' % (  # noqa
             as_string(self.process.config.name),
             as_string(groupname),
             self.pid,
|
|
1286
1652
|
channel = 'stderr'
|
1287
1653
|
|
1288
1654
|
|
1289
|
-
class ProcessCommunicationEvent(Event):
|
1290
|
-
""" Abstract """
|
1655
|
+
class ProcessCommunicationEvent(Event, abc.ABC):
|
1291
1656
|
# event mode tokens
|
1292
1657
|
BEGIN_TOKEN = b'<!--XSUPERVISOR:BEGIN-->'
|
1293
1658
|
END_TOKEN = b'<!--XSUPERVISOR:END-->'
|
@@ -1651,6 +2016,7 @@ class ServerContext(AbstractServerContext):
         Set the uid of the supervisord process. Called during supervisord startup only. No return value. Exits the
         process via usage() if privileges could not be dropped.
         """
+
         if self.uid is None:
             if os.getuid() == 0:
                 warnings.warn(
@@ -1830,6 +2196,7 @@ def drop_privileges(user: ta.Union[int, str, None]) -> ta.Optional[str]:
     and when spawning subprocesses. Returns None on success or a string error message if privileges could not be
     dropped.
     """
+
     if user is None:
         return 'No user specified to setuid to!'

@@ -2022,7 +2389,9 @@ class OutputDispatcher(Dispatcher):

         `event_type` should be one of ProcessLogStdoutEvent or ProcessLogStderrEvent
         """
+
         super().__init__(process, event_type.channel, fd)
+
         self.event_type = event_type

         self.lc: ProcessConfig.Log = getattr(process.config, self._channel)
@@ -2036,25 +2405,30 @@ class OutputDispatcher(Dispatcher):
         self._output_buffer = b''  # data waiting to be logged

         # all code below is purely for minor speedups
+
         begin_token = self.event_type.BEGIN_TOKEN
         end_token = self.event_type.END_TOKEN
-        self.
-        self.
-
+        self._begin_token_data = (begin_token, len(begin_token))
+        self._end_token_data = (end_token, len(end_token))
+
+        self._main_log_level = logging.DEBUG
+
+        self._log_to_main_log = process.context.config.loglevel <= self._main_log_level
+
         config = self._process.config
-        self.
-        self.
-        self.stderr_events_enabled = config.stderr.events_enabled
+        self._stdout_events_enabled = config.stdout.events_enabled
+        self._stderr_events_enabled = config.stderr.events_enabled

-    _child_log: ta.Optional[logging.Logger]  # the current logger (normal_log or capture_log)
-    _normal_log: ta.Optional[logging.Logger]  # the "normal" (non-capture) logger
-    _capture_log: ta.Optional[logging.Logger]  # the logger used while we're in capture_mode
+    _child_log: ta.Optional[logging.Logger] = None  # the current logger (normal_log or capture_log)
+    _normal_log: ta.Optional[logging.Logger] = None  # the "normal" (non-capture) logger
+    _capture_log: ta.Optional[logging.Logger] = None  # the logger used while we're in capture_mode

     def _init_normal_log(self) -> None:
         """
         Configure the "normal" (non-capture) log for this channel of this process. Sets self.normal_log if logging is
         enabled.
         """
+
         config = self._process.config  # noqa
         channel = self._channel  # noqa

@@ -2075,7 +2449,7 @@ class OutputDispatcher(Dispatcher):
         # maxbytes=maxbytes,
         # backups=backups,
         # )
-
+
         # if to_syslog:
         # loggers.handle_syslog(
         # self.normal_log,
@@ -2087,6 +2461,7 @@ class OutputDispatcher(Dispatcher):
         Configure the capture log for this process. This log is used to temporarily capture output when special output
         is detected. Sets self.capture_log if capturing is enabled.
         """
+
         capture_maxbytes = self.lc.capture_maxbytes
         if capture_maxbytes:
             self._capture_log = logging.getLogger(__name__)
@@ -2113,9 +2488,11 @@ class OutputDispatcher(Dispatcher):
         if data:
             if self._process.context.config.strip_ansi:
                 data = strip_escapes(data)
+
             if self._child_log:
                 self._child_log.info(data)
-
+
+            if self._log_to_main_log:
                 if not isinstance(data, bytes):
                     text = data
                 else:
@@ -2123,11 +2500,13 @@ class OutputDispatcher(Dispatcher):
                         text = data.decode('utf-8')
                     except UnicodeDecodeError:
                         text = f'Undecodable: {data!r}'
-                log.log(self.
+                log.log(self._main_log_level, '%r %s output:\n%s', self._process.config.name, self._channel, text)  # noqa
+
             if self._channel == 'stdout':
-                if self.
+                if self._stdout_events_enabled:
                     notify_event(ProcessLogStdoutEvent(self._process, self._process.pid, data))
-
+
+            elif self._stderr_events_enabled:
                 notify_event(ProcessLogStderrEvent(self._process, self._process.pid, data))

     def record_output(self):
|
@@ -2139,9 +2518,9 @@ class OutputDispatcher(Dispatcher):
|
|
2139
2518
|
return
|
2140
2519
|
|
2141
2520
|
if self._capture_mode:
|
2142
|
-
token, tokenlen = self.
|
2521
|
+
token, tokenlen = self._end_token_data
|
2143
2522
|
else:
|
2144
|
-
token, tokenlen = self.
|
2523
|
+
token, tokenlen = self._begin_token_data
|
2145
2524
|
|
2146
2525
|
if len(self._output_buffer) <= tokenlen:
|
2147
2526
|
return # not enough data
|
@@ -2329,6 +2708,7 @@ class Subprocess(AbstractSubprocess):
         Internal: turn a program name into a file name, using $PATH, make sure it exists / is executable, raising a
         ProcessError if not
         """
+
         try:
             commandargs = shlex.split(self.config.command)
         except ValueError as e:
@@ -2712,8 +3092,12 @@ class Subprocess(AbstractSubprocess):
             os.kill(self.pid, sig)
         except OSError as exc:
             if exc.errno == errno.ESRCH:
-                log.debug(
-
+                log.debug(
+                    'unable to signal %s (pid %s), it probably just now exited on its own: %s',
+                    processname,
+                    self.pid,
+                    str(exc),
+                )
                 # we could change the state here but we intentionally do not. we will do it during normal SIGCHLD
                 # processing.
                 return None
@@ -2729,6 +3113,7 @@ class Subprocess(AbstractSubprocess):

     def finish(self, sts: int) -> None:
         """ The process was reaped and we need to report and manage its state """
+
         self.drain()

         es, msg = decode_wait_status(sts)
@@ -2745,9 +3130,11 @@ class Subprocess(AbstractSubprocess):
         else:
             too_quickly = False
             log.warning(
-                "process '%s' (%s) laststart time is in the future, don't "
-                "
-
+                "process '%s' (%s) laststart time is in the future, don't know how long process was running so "
+                "assuming it did not exit too quickly",
+                processname,
+                self.pid,
+            )

         exit_expected = es in self.config.exitcodes

|
@@ -2844,7 +3231,7 @@ class Subprocess(AbstractSubprocess):
|
|
2844
3231
|
if self.config.autorestart is RestartUnconditionally:
|
2845
3232
|
# EXITED -> STARTING
|
2846
3233
|
self.spawn()
|
2847
|
-
elif self.exitstatus not in self.config.exitcodes:
|
3234
|
+
elif self.exitstatus not in self.config.exitcodes:
|
2848
3235
|
# EXITED -> STARTING
|
2849
3236
|
self.spawn()
|
2850
3237
|
|
@@ -3271,39 +3658,24 @@ def timeslice(period, when):


 def main(args=None, test=False):
+    import argparse
+
+    parser = argparse.ArgumentParser()
+    parser.add_argument('config_file', metavar='config-file')
+    args = parser.parse_args()
+
     configure_standard_logging('INFO')

+    if not (cf := args.config_file):
+        raise RuntimeError('No config file specified')
+
     # if we hup, restart by making a new Supervisor()
     first = True
     while True:
-
-
-
-
-                    name='default',
-                    processes=[
-                        ProcessConfig(
-                            name='sleep',
-                            command='sleep 600',
-                            stdout=ProcessConfig.Log(
-                                file='/dev/fd/1',
-                                maxbytes=0,
-                            ),
-                            redirect_stderr=True,
-                        ),
-                        ProcessConfig(
-                            name='ls',
-                            command='ls -al',
-                            stdout=ProcessConfig.Log(
-                                file='/dev/fd/1',
-                                maxbytes=0,
-                            ),
-                            redirect_stderr=True,
-                        ),
-                    ],
-                ),
-            ],
-        )
+        with open(cf) as f:
+            config_src = f.read()
+        config_dct = json.loads(config_src)
+        config: ServerConfig = unmarshal_obj(config_dct, ServerConfig)

         context = ServerContext(
             config,
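
The rewritten main() above now takes a config-file path on the command line, reads it as JSON, and unmarshals it into a ServerConfig via the new omlish-lite marshal helpers. Below is a minimal sketch (not shipped in the wheel) of what such a config file could contain, assuming DataclassObjMarshaler maps JSON keys directly onto the ServerConfig / ProcessGroupConfig / ProcessConfig field names shown in this diff; the example process mirrors the 'sleep' entry removed from the old hard-coded config, and any field left out keeps its dataclass default.

# Hypothetical example: a JSON config for the new supervisor entry point,
# using only field names visible in this diff.
import json

config_src = '''
{
    "nodaemon": true,
    "groups": [
        {
            "name": "default",
            "processes": [
                {"name": "sleep", "command": "sleep 600", "redirect_stderr": true}
            ]
        }
    ]
}
'''

config_dct = json.loads(config_src)
# Inside the amalgamated script this dict is then turned into a dataclass:
#   config: ServerConfig = unmarshal_obj(config_dct, ServerConfig)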