ominfra 0.0.0.dev137__py3-none-any.whl → 0.0.0.dev139__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ominfra/manage/__init__.py +13 -0
- ominfra/manage/{new/main.py → main.py} +68 -18
- ominfra/pyremote.py +196 -145
- ominfra/{manage/new/_manage.py → scripts/manage.py} +267 -170
- ominfra/scripts/supervisor.py +32 -31
- ominfra/supervisor/processimpl.py +32 -31
- {ominfra-0.0.0.dev137.dist-info → ominfra-0.0.0.dev139.dist-info}/METADATA +3 -3
- {ominfra-0.0.0.dev137.dist-info → ominfra-0.0.0.dev139.dist-info}/RECORD +15 -44
- ominfra/manage/deploy/_executor.py +0 -1415
- ominfra/manage/deploy/configs.py +0 -19
- ominfra/manage/deploy/executor/__init__.py +0 -1
- ominfra/manage/deploy/executor/base.py +0 -115
- ominfra/manage/deploy/executor/concerns/__init__.py +0 -0
- ominfra/manage/deploy/executor/concerns/dirs.py +0 -28
- ominfra/manage/deploy/executor/concerns/nginx.py +0 -47
- ominfra/manage/deploy/executor/concerns/repo.py +0 -17
- ominfra/manage/deploy/executor/concerns/supervisor.py +0 -46
- ominfra/manage/deploy/executor/concerns/systemd.py +0 -88
- ominfra/manage/deploy/executor/concerns/user.py +0 -25
- ominfra/manage/deploy/executor/concerns/venv.py +0 -22
- ominfra/manage/deploy/executor/main.py +0 -119
- ominfra/manage/deploy/poly/__init__.py +0 -1
- ominfra/manage/deploy/poly/_main.py +0 -975
- ominfra/manage/deploy/poly/base.py +0 -178
- ominfra/manage/deploy/poly/configs.py +0 -38
- ominfra/manage/deploy/poly/deploy.py +0 -25
- ominfra/manage/deploy/poly/main.py +0 -18
- ominfra/manage/deploy/poly/nginx.py +0 -60
- ominfra/manage/deploy/poly/repo.py +0 -41
- ominfra/manage/deploy/poly/runtime.py +0 -39
- ominfra/manage/deploy/poly/site.py +0 -11
- ominfra/manage/deploy/poly/supervisor.py +0 -64
- ominfra/manage/deploy/poly/venv.py +0 -52
- ominfra/manage/deploy/remote.py +0 -91
- ominfra/manage/manage.py +0 -12
- ominfra/manage/new/__init__.py +0 -1
- ominfra/manage/new/commands/__init__.py +0 -0
- /ominfra/manage/{deploy → commands}/__init__.py +0 -0
- /ominfra/manage/{new/commands → commands}/base.py +0 -0
- /ominfra/manage/{new/commands → commands}/subprocess.py +0 -0
- {ominfra-0.0.0.dev137.dist-info → ominfra-0.0.0.dev139.dist-info}/LICENSE +0 -0
- {ominfra-0.0.0.dev137.dist-info → ominfra-0.0.0.dev139.dist-info}/WHEEL +0 -0
- {ominfra-0.0.0.dev137.dist-info → ominfra-0.0.0.dev139.dist-info}/entry_points.txt +0 -0
- {ominfra-0.0.0.dev137.dist-info → ominfra-0.0.0.dev139.dist-info}/top_level.txt +0 -0
@@ -1,1415 +0,0 @@
|
|
1
|
-
#!/usr/bin/env python3
|
2
|
-
# noinspection DuplicatedCode
|
3
|
-
# @omlish-lite
|
4
|
-
# @omlish-script
|
5
|
-
# @omlish-amalg-output executor/main.py
|
6
|
-
# ruff: noqa: N802 UP006 UP007 UP036
|
7
|
-
r"""
|
8
|
-
TODO:
|
9
|
-
- flock
|
10
|
-
- interp.py
|
11
|
-
- systemd
|
12
|
-
|
13
|
-
deployment matrix
|
14
|
-
- os: ubuntu / amzn / generic
|
15
|
-
- arch: amd64 / arm64
|
16
|
-
- host: bare / docker
|
17
|
-
- init: supervisor-provided / supervisor-must-configure / systemd (/ self?)
|
18
|
-
- interp: system / pyenv / interp.py
|
19
|
-
- venv: none / yes
|
20
|
-
- nginx: no / provided / must-configure
|
21
|
-
|
22
|
-
==
|
23
|
-
|
24
|
-
~deploy
|
25
|
-
deploy.pid (flock)
|
26
|
-
/app
|
27
|
-
/<appspec> - shallow clone
|
28
|
-
/conf
|
29
|
-
/env
|
30
|
-
<appspec>.env
|
31
|
-
/nginx
|
32
|
-
<appspec>.conf
|
33
|
-
/supervisor
|
34
|
-
<appspec>.conf
|
35
|
-
/venv
|
36
|
-
/<appspec>
|
37
|
-
|
38
|
-
?
|
39
|
-
/logs
|
40
|
-
/wrmsr--omlish--<spec>
|
41
|
-
|
42
|
-
spec = <name>--<rev>--<when>
|
43
|
-
|
44
|
-
https://docs.docker.com/config/containers/multi-service_container/#use-a-process-manager
|
45
|
-
https://serverfault.com/questions/211525/supervisor-not-loading-new-configuration-files
|
46
|
-
""" # noqa
|
47
|
-
import abc
|
48
|
-
import argparse
|
49
|
-
import base64
|
50
|
-
import collections.abc
|
51
|
-
import contextlib
|
52
|
-
import dataclasses as dc
|
53
|
-
import datetime
|
54
|
-
import decimal
|
55
|
-
import enum
|
56
|
-
import fractions
|
57
|
-
import functools
|
58
|
-
import inspect
|
59
|
-
import json
|
60
|
-
import logging
|
61
|
-
import os
|
62
|
-
import os.path
|
63
|
-
import pwd
|
64
|
-
import shlex
|
65
|
-
import subprocess
|
66
|
-
import sys
|
67
|
-
import textwrap
|
68
|
-
import threading
|
69
|
-
import types
|
70
|
-
import typing as ta
|
71
|
-
import uuid
|
72
|
-
import weakref # noqa
|
73
|
-
|
74
|
-
|
75
|
-
########################################
|
76
|
-
|
77
|
-
|
78
|
-
if sys.version_info < (3, 8):
|
79
|
-
raise OSError(f'Requires python (3, 8), got {sys.version_info} from {sys.executable}') # noqa
|
80
|
-
|
81
|
-
|
82
|
-
########################################
|
83
|
-
|
84
|
-
|
85
|
-
# ../../../../omlish/lite/cached.py
|
86
|
-
T = ta.TypeVar('T')
|
87
|
-
|
88
|
-
# ../../../../omlish/lite/check.py
|
89
|
-
SizedT = ta.TypeVar('SizedT', bound=ta.Sized)
|
90
|
-
|
91
|
-
|
92
|
-
########################################
|
93
|
-
# ../../configs.py
|
94
|
-
|
95
|
-
|
96
|
-
@dc.dataclass(frozen=True)
|
97
|
-
class DeployConfig:
|
98
|
-
python_bin: str
|
99
|
-
app_name: str
|
100
|
-
repo_url: str
|
101
|
-
revision: str
|
102
|
-
requirements_txt: str
|
103
|
-
entrypoint: str
|
104
|
-
|
105
|
-
|
106
|
-
@dc.dataclass(frozen=True)
|
107
|
-
class HostConfig:
|
108
|
-
username: str = 'deploy'
|
109
|
-
|
110
|
-
global_supervisor_conf_file_path: str = '/etc/supervisor/conf.d/supervisord.conf'
|
111
|
-
global_nginx_conf_file_path: str = '/etc/nginx/sites-enabled/deploy.conf'
|
112
|
-
|
113
|
-
|
114
|
-
########################################
|
115
|
-
# ../../../../../omlish/lite/cached.py
|
116
|
-
|
117
|
-
|
118
|
-
class _cached_nullary: # noqa
|
119
|
-
def __init__(self, fn):
|
120
|
-
super().__init__()
|
121
|
-
self._fn = fn
|
122
|
-
self._value = self._missing = object()
|
123
|
-
functools.update_wrapper(self, fn)
|
124
|
-
|
125
|
-
def __call__(self, *args, **kwargs): # noqa
|
126
|
-
if self._value is self._missing:
|
127
|
-
self._value = self._fn()
|
128
|
-
return self._value
|
129
|
-
|
130
|
-
def __get__(self, instance, owner): # noqa
|
131
|
-
bound = instance.__dict__[self._fn.__name__] = self.__class__(self._fn.__get__(instance, owner))
|
132
|
-
return bound
|
133
|
-
|
134
|
-
|
135
|
-
def cached_nullary(fn): # ta.Callable[..., T]) -> ta.Callable[..., T]:
|
136
|
-
return _cached_nullary(fn)
|
137
|
-
|
138
|
-
|
139
|
-
########################################
|
140
|
-
# ../../../../../omlish/lite/check.py
|
141
|
-
|
142
|
-
|
143
|
-
def check_isinstance(v: ta.Any, spec: ta.Union[ta.Type[T], tuple]) -> T:
|
144
|
-
if not isinstance(v, spec):
|
145
|
-
raise TypeError(v)
|
146
|
-
return v
|
147
|
-
|
148
|
-
|
149
|
-
def check_not_isinstance(v: T, spec: ta.Union[type, tuple]) -> T:
|
150
|
-
if isinstance(v, spec):
|
151
|
-
raise TypeError(v)
|
152
|
-
return v
|
153
|
-
|
154
|
-
|
155
|
-
def check_none(v: T) -> None:
|
156
|
-
if v is not None:
|
157
|
-
raise ValueError(v)
|
158
|
-
|
159
|
-
|
160
|
-
def check_not_none(v: ta.Optional[T]) -> T:
|
161
|
-
if v is None:
|
162
|
-
raise ValueError
|
163
|
-
return v
|
164
|
-
|
165
|
-
|
166
|
-
def check_not(v: ta.Any) -> None:
|
167
|
-
if v:
|
168
|
-
raise ValueError(v)
|
169
|
-
return v
|
170
|
-
|
171
|
-
|
172
|
-
def check_non_empty_str(v: ta.Optional[str]) -> str:
|
173
|
-
if not v:
|
174
|
-
raise ValueError
|
175
|
-
return v
|
176
|
-
|
177
|
-
|
178
|
-
def check_state(v: bool, msg: str = 'Illegal state') -> None:
|
179
|
-
if not v:
|
180
|
-
raise ValueError(msg)
|
181
|
-
|
182
|
-
|
183
|
-
def check_equal(l: T, r: T) -> T:
|
184
|
-
if l != r:
|
185
|
-
raise ValueError(l, r)
|
186
|
-
return l
|
187
|
-
|
188
|
-
|
189
|
-
def check_not_equal(l: T, r: T) -> T:
|
190
|
-
if l == r:
|
191
|
-
raise ValueError(l, r)
|
192
|
-
return l
|
193
|
-
|
194
|
-
|
195
|
-
def check_is(l: T, r: T) -> T:
|
196
|
-
if l is not r:
|
197
|
-
raise ValueError(l, r)
|
198
|
-
return l
|
199
|
-
|
200
|
-
|
201
|
-
def check_is_not(l: T, r: ta.Any) -> T:
|
202
|
-
if l is r:
|
203
|
-
raise ValueError(l, r)
|
204
|
-
return l
|
205
|
-
|
206
|
-
|
207
|
-
def check_in(v: T, c: ta.Container[T]) -> T:
|
208
|
-
if v not in c:
|
209
|
-
raise ValueError(v, c)
|
210
|
-
return v
|
211
|
-
|
212
|
-
|
213
|
-
def check_not_in(v: T, c: ta.Container[T]) -> T:
|
214
|
-
if v in c:
|
215
|
-
raise ValueError(v, c)
|
216
|
-
return v
|
217
|
-
|
218
|
-
|
219
|
-
def check_single(vs: ta.Iterable[T]) -> T:
|
220
|
-
[v] = vs
|
221
|
-
return v
|
222
|
-
|
223
|
-
|
224
|
-
def check_empty(v: SizedT) -> SizedT:
|
225
|
-
if len(v):
|
226
|
-
raise ValueError(v)
|
227
|
-
return v
|
228
|
-
|
229
|
-
|
230
|
-
def check_non_empty(v: SizedT) -> SizedT:
|
231
|
-
if not len(v):
|
232
|
-
raise ValueError(v)
|
233
|
-
return v
|
234
|
-
|
235
|
-
|
236
|
-
########################################
|
237
|
-
# ../../../../../omlish/lite/json.py
|
238
|
-
|
239
|
-
|
240
|
-
##
|
241
|
-
|
242
|
-
|
243
|
-
JSON_PRETTY_INDENT = 2
|
244
|
-
|
245
|
-
JSON_PRETTY_KWARGS: ta.Mapping[str, ta.Any] = dict(
|
246
|
-
indent=JSON_PRETTY_INDENT,
|
247
|
-
)
|
248
|
-
|
249
|
-
json_dump_pretty: ta.Callable[..., bytes] = functools.partial(json.dump, **JSON_PRETTY_KWARGS) # type: ignore
|
250
|
-
json_dumps_pretty: ta.Callable[..., str] = functools.partial(json.dumps, **JSON_PRETTY_KWARGS)
|
251
|
-
|
252
|
-
|
253
|
-
##
|
254
|
-
|
255
|
-
|
256
|
-
JSON_COMPACT_SEPARATORS = (',', ':')
|
257
|
-
|
258
|
-
JSON_COMPACT_KWARGS: ta.Mapping[str, ta.Any] = dict(
|
259
|
-
indent=None,
|
260
|
-
separators=JSON_COMPACT_SEPARATORS,
|
261
|
-
)
|
262
|
-
|
263
|
-
json_dump_compact: ta.Callable[..., bytes] = functools.partial(json.dump, **JSON_COMPACT_KWARGS) # type: ignore
|
264
|
-
json_dumps_compact: ta.Callable[..., str] = functools.partial(json.dumps, **JSON_COMPACT_KWARGS)
|
265
|
-
|
266
|
-
|
267
|
-
########################################
|
268
|
-
# ../../../../../omlish/lite/reflect.py
|
269
|
-
|
270
|
-
|
271
|
-
_GENERIC_ALIAS_TYPES = (
|
272
|
-
ta._GenericAlias, # type: ignore # noqa
|
273
|
-
*([ta._SpecialGenericAlias] if hasattr(ta, '_SpecialGenericAlias') else []), # noqa
|
274
|
-
)
|
275
|
-
|
276
|
-
|
277
|
-
def is_generic_alias(obj, *, origin: ta.Any = None) -> bool:
|
278
|
-
return (
|
279
|
-
isinstance(obj, _GENERIC_ALIAS_TYPES) and
|
280
|
-
(origin is None or ta.get_origin(obj) is origin)
|
281
|
-
)
|
282
|
-
|
283
|
-
|
284
|
-
is_union_alias = functools.partial(is_generic_alias, origin=ta.Union)
|
285
|
-
is_callable_alias = functools.partial(is_generic_alias, origin=ta.Callable)
|
286
|
-
|
287
|
-
|
288
|
-
def is_optional_alias(spec: ta.Any) -> bool:
|
289
|
-
return (
|
290
|
-
isinstance(spec, _GENERIC_ALIAS_TYPES) and # noqa
|
291
|
-
ta.get_origin(spec) is ta.Union and
|
292
|
-
len(ta.get_args(spec)) == 2 and
|
293
|
-
any(a in (None, type(None)) for a in ta.get_args(spec))
|
294
|
-
)
|
295
|
-
|
296
|
-
|
297
|
-
def get_optional_alias_arg(spec: ta.Any) -> ta.Any:
|
298
|
-
[it] = [it for it in ta.get_args(spec) if it not in (None, type(None))]
|
299
|
-
return it
|
300
|
-
|
301
|
-
|
302
|
-
def is_new_type(spec: ta.Any) -> bool:
|
303
|
-
if isinstance(ta.NewType, type):
|
304
|
-
return isinstance(spec, ta.NewType)
|
305
|
-
else:
|
306
|
-
# Before https://github.com/python/cpython/commit/c2f33dfc83ab270412bf243fb21f724037effa1a
|
307
|
-
return isinstance(spec, types.FunctionType) and spec.__code__ is ta.NewType.__code__.co_consts[1] # type: ignore # noqa
|
308
|
-
|
309
|
-
|
310
|
-
def deep_subclasses(cls: ta.Type[T]) -> ta.Iterator[ta.Type[T]]:
|
311
|
-
seen = set()
|
312
|
-
todo = list(reversed(cls.__subclasses__()))
|
313
|
-
while todo:
|
314
|
-
cur = todo.pop()
|
315
|
-
if cur in seen:
|
316
|
-
continue
|
317
|
-
seen.add(cur)
|
318
|
-
yield cur
|
319
|
-
todo.extend(reversed(cur.__subclasses__()))
|
320
|
-
|
321
|
-
|
322
|
-
########################################
|
323
|
-
# ../../../../../omlish/lite/logs.py
|
324
|
-
"""
|
325
|
-
TODO:
|
326
|
-
- translate json keys
|
327
|
-
- debug
|
328
|
-
"""
|
329
|
-
|
330
|
-
|
331
|
-
log = logging.getLogger(__name__)
|
332
|
-
|
333
|
-
|
334
|
-
##
|
335
|
-
|
336
|
-
|
337
|
-
class TidLogFilter(logging.Filter):
|
338
|
-
|
339
|
-
def filter(self, record):
|
340
|
-
record.tid = threading.get_native_id()
|
341
|
-
return True
|
342
|
-
|
343
|
-
|
344
|
-
##
|
345
|
-
|
346
|
-
|
347
|
-
class JsonLogFormatter(logging.Formatter):
|
348
|
-
|
349
|
-
KEYS: ta.Mapping[str, bool] = {
|
350
|
-
'name': False,
|
351
|
-
'msg': False,
|
352
|
-
'args': False,
|
353
|
-
'levelname': False,
|
354
|
-
'levelno': False,
|
355
|
-
'pathname': False,
|
356
|
-
'filename': False,
|
357
|
-
'module': False,
|
358
|
-
'exc_info': True,
|
359
|
-
'exc_text': True,
|
360
|
-
'stack_info': True,
|
361
|
-
'lineno': False,
|
362
|
-
'funcName': False,
|
363
|
-
'created': False,
|
364
|
-
'msecs': False,
|
365
|
-
'relativeCreated': False,
|
366
|
-
'thread': False,
|
367
|
-
'threadName': False,
|
368
|
-
'processName': False,
|
369
|
-
'process': False,
|
370
|
-
}
|
371
|
-
|
372
|
-
def format(self, record: logging.LogRecord) -> str:
|
373
|
-
dct = {
|
374
|
-
k: v
|
375
|
-
for k, o in self.KEYS.items()
|
376
|
-
for v in [getattr(record, k)]
|
377
|
-
if not (o and v is None)
|
378
|
-
}
|
379
|
-
return json_dumps_compact(dct)
|
380
|
-
|
381
|
-
|
382
|
-
##
|
383
|
-
|
384
|
-
|
385
|
-
STANDARD_LOG_FORMAT_PARTS = [
|
386
|
-
('asctime', '%(asctime)-15s'),
|
387
|
-
('process', 'pid=%(process)-6s'),
|
388
|
-
('thread', 'tid=%(thread)x'),
|
389
|
-
('levelname', '%(levelname)s'),
|
390
|
-
('name', '%(name)s'),
|
391
|
-
('separator', '::'),
|
392
|
-
('message', '%(message)s'),
|
393
|
-
]
|
394
|
-
|
395
|
-
|
396
|
-
class StandardLogFormatter(logging.Formatter):
|
397
|
-
|
398
|
-
@staticmethod
|
399
|
-
def build_log_format(parts: ta.Iterable[ta.Tuple[str, str]]) -> str:
|
400
|
-
return ' '.join(v for k, v in parts)
|
401
|
-
|
402
|
-
converter = datetime.datetime.fromtimestamp # type: ignore
|
403
|
-
|
404
|
-
def formatTime(self, record, datefmt=None):
|
405
|
-
ct = self.converter(record.created) # type: ignore
|
406
|
-
if datefmt:
|
407
|
-
return ct.strftime(datefmt) # noqa
|
408
|
-
else:
|
409
|
-
t = ct.strftime('%Y-%m-%d %H:%M:%S')
|
410
|
-
return '%s.%03d' % (t, record.msecs) # noqa
|
411
|
-
|
412
|
-
|
413
|
-
##
|
414
|
-
|
415
|
-
|
416
|
-
class ProxyLogFilterer(logging.Filterer):
|
417
|
-
def __init__(self, underlying: logging.Filterer) -> None: # noqa
|
418
|
-
self._underlying = underlying
|
419
|
-
|
420
|
-
@property
|
421
|
-
def underlying(self) -> logging.Filterer:
|
422
|
-
return self._underlying
|
423
|
-
|
424
|
-
@property
|
425
|
-
def filters(self):
|
426
|
-
return self._underlying.filters
|
427
|
-
|
428
|
-
@filters.setter
|
429
|
-
def filters(self, filters):
|
430
|
-
self._underlying.filters = filters
|
431
|
-
|
432
|
-
def addFilter(self, filter): # noqa
|
433
|
-
self._underlying.addFilter(filter)
|
434
|
-
|
435
|
-
def removeFilter(self, filter): # noqa
|
436
|
-
self._underlying.removeFilter(filter)
|
437
|
-
|
438
|
-
def filter(self, record):
|
439
|
-
return self._underlying.filter(record)
|
440
|
-
|
441
|
-
|
442
|
-
class ProxyLogHandler(ProxyLogFilterer, logging.Handler):
|
443
|
-
def __init__(self, underlying: logging.Handler) -> None: # noqa
|
444
|
-
ProxyLogFilterer.__init__(self, underlying)
|
445
|
-
|
446
|
-
_underlying: logging.Handler
|
447
|
-
|
448
|
-
@property
|
449
|
-
def underlying(self) -> logging.Handler:
|
450
|
-
return self._underlying
|
451
|
-
|
452
|
-
def get_name(self):
|
453
|
-
return self._underlying.get_name()
|
454
|
-
|
455
|
-
def set_name(self, name):
|
456
|
-
self._underlying.set_name(name)
|
457
|
-
|
458
|
-
@property
|
459
|
-
def name(self):
|
460
|
-
return self._underlying.name
|
461
|
-
|
462
|
-
@property
|
463
|
-
def level(self):
|
464
|
-
return self._underlying.level
|
465
|
-
|
466
|
-
@level.setter
|
467
|
-
def level(self, level):
|
468
|
-
self._underlying.level = level
|
469
|
-
|
470
|
-
@property
|
471
|
-
def formatter(self):
|
472
|
-
return self._underlying.formatter
|
473
|
-
|
474
|
-
@formatter.setter
|
475
|
-
def formatter(self, formatter):
|
476
|
-
self._underlying.formatter = formatter
|
477
|
-
|
478
|
-
def createLock(self):
|
479
|
-
self._underlying.createLock()
|
480
|
-
|
481
|
-
def acquire(self):
|
482
|
-
self._underlying.acquire()
|
483
|
-
|
484
|
-
def release(self):
|
485
|
-
self._underlying.release()
|
486
|
-
|
487
|
-
def setLevel(self, level):
|
488
|
-
self._underlying.setLevel(level)
|
489
|
-
|
490
|
-
def format(self, record):
|
491
|
-
return self._underlying.format(record)
|
492
|
-
|
493
|
-
def emit(self, record):
|
494
|
-
self._underlying.emit(record)
|
495
|
-
|
496
|
-
def handle(self, record):
|
497
|
-
return self._underlying.handle(record)
|
498
|
-
|
499
|
-
def setFormatter(self, fmt):
|
500
|
-
self._underlying.setFormatter(fmt)
|
501
|
-
|
502
|
-
def flush(self):
|
503
|
-
self._underlying.flush()
|
504
|
-
|
505
|
-
def close(self):
|
506
|
-
self._underlying.close()
|
507
|
-
|
508
|
-
def handleError(self, record):
|
509
|
-
self._underlying.handleError(record)
|
510
|
-
|
511
|
-
|
512
|
-
##
|
513
|
-
|
514
|
-
|
515
|
-
class StandardLogHandler(ProxyLogHandler):
|
516
|
-
pass
|
517
|
-
|
518
|
-
|
519
|
-
##
|
520
|
-
|
521
|
-
|
522
|
-
@contextlib.contextmanager
|
523
|
-
def _locking_logging_module_lock() -> ta.Iterator[None]:
|
524
|
-
if hasattr(logging, '_acquireLock'):
|
525
|
-
logging._acquireLock() # noqa
|
526
|
-
try:
|
527
|
-
yield
|
528
|
-
finally:
|
529
|
-
logging._releaseLock() # type: ignore # noqa
|
530
|
-
|
531
|
-
elif hasattr(logging, '_lock'):
|
532
|
-
# https://github.com/python/cpython/commit/74723e11109a320e628898817ab449b3dad9ee96
|
533
|
-
with logging._lock: # noqa
|
534
|
-
yield
|
535
|
-
|
536
|
-
else:
|
537
|
-
raise Exception("Can't find lock in logging module")
|
538
|
-
|
539
|
-
|
540
|
-
def configure_standard_logging(
|
541
|
-
level: ta.Union[int, str] = logging.INFO,
|
542
|
-
*,
|
543
|
-
json: bool = False,
|
544
|
-
target: ta.Optional[logging.Logger] = None,
|
545
|
-
force: bool = False,
|
546
|
-
handler_factory: ta.Optional[ta.Callable[[], logging.Handler]] = None,
|
547
|
-
) -> ta.Optional[StandardLogHandler]:
|
548
|
-
with _locking_logging_module_lock():
|
549
|
-
if target is None:
|
550
|
-
target = logging.root
|
551
|
-
|
552
|
-
#
|
553
|
-
|
554
|
-
if not force:
|
555
|
-
if any(isinstance(h, StandardLogHandler) for h in list(target.handlers)):
|
556
|
-
return None
|
557
|
-
|
558
|
-
#
|
559
|
-
|
560
|
-
if handler_factory is not None:
|
561
|
-
handler = handler_factory()
|
562
|
-
else:
|
563
|
-
handler = logging.StreamHandler()
|
564
|
-
|
565
|
-
#
|
566
|
-
|
567
|
-
formatter: logging.Formatter
|
568
|
-
if json:
|
569
|
-
formatter = JsonLogFormatter()
|
570
|
-
else:
|
571
|
-
formatter = StandardLogFormatter(StandardLogFormatter.build_log_format(STANDARD_LOG_FORMAT_PARTS))
|
572
|
-
handler.setFormatter(formatter)
|
573
|
-
|
574
|
-
#
|
575
|
-
|
576
|
-
handler.addFilter(TidLogFilter())
|
577
|
-
|
578
|
-
#
|
579
|
-
|
580
|
-
target.addHandler(handler)
|
581
|
-
|
582
|
-
#
|
583
|
-
|
584
|
-
if level is not None:
|
585
|
-
target.setLevel(level)
|
586
|
-
|
587
|
-
#
|
588
|
-
|
589
|
-
return StandardLogHandler(handler)
|
590
|
-
|
591
|
-
|
592
|
-
########################################
|
593
|
-
# ../../../../../omlish/lite/marshal.py
|
594
|
-
"""
|
595
|
-
TODO:
|
596
|
-
- pickle stdlib objs? have to pin to 3.8 pickle protocol, will be cross-version
|
597
|
-
- nonstrict toggle
|
598
|
-
"""
|
599
|
-
|
600
|
-
|
601
|
-
##
|
602
|
-
|
603
|
-
|
604
|
-
class ObjMarshaler(abc.ABC):
|
605
|
-
@abc.abstractmethod
|
606
|
-
def marshal(self, o: ta.Any) -> ta.Any:
|
607
|
-
raise NotImplementedError
|
608
|
-
|
609
|
-
@abc.abstractmethod
|
610
|
-
def unmarshal(self, o: ta.Any) -> ta.Any:
|
611
|
-
raise NotImplementedError
|
612
|
-
|
613
|
-
|
614
|
-
class NopObjMarshaler(ObjMarshaler):
|
615
|
-
def marshal(self, o: ta.Any) -> ta.Any:
|
616
|
-
return o
|
617
|
-
|
618
|
-
def unmarshal(self, o: ta.Any) -> ta.Any:
|
619
|
-
return o
|
620
|
-
|
621
|
-
|
622
|
-
@dc.dataclass()
|
623
|
-
class ProxyObjMarshaler(ObjMarshaler):
|
624
|
-
m: ta.Optional[ObjMarshaler] = None
|
625
|
-
|
626
|
-
def marshal(self, o: ta.Any) -> ta.Any:
|
627
|
-
return check_not_none(self.m).marshal(o)
|
628
|
-
|
629
|
-
def unmarshal(self, o: ta.Any) -> ta.Any:
|
630
|
-
return check_not_none(self.m).unmarshal(o)
|
631
|
-
|
632
|
-
|
633
|
-
@dc.dataclass(frozen=True)
|
634
|
-
class CastObjMarshaler(ObjMarshaler):
|
635
|
-
ty: type
|
636
|
-
|
637
|
-
def marshal(self, o: ta.Any) -> ta.Any:
|
638
|
-
return o
|
639
|
-
|
640
|
-
def unmarshal(self, o: ta.Any) -> ta.Any:
|
641
|
-
return self.ty(o)
|
642
|
-
|
643
|
-
|
644
|
-
class DynamicObjMarshaler(ObjMarshaler):
|
645
|
-
def marshal(self, o: ta.Any) -> ta.Any:
|
646
|
-
return marshal_obj(o)
|
647
|
-
|
648
|
-
def unmarshal(self, o: ta.Any) -> ta.Any:
|
649
|
-
return o
|
650
|
-
|
651
|
-
|
652
|
-
@dc.dataclass(frozen=True)
|
653
|
-
class Base64ObjMarshaler(ObjMarshaler):
|
654
|
-
ty: type
|
655
|
-
|
656
|
-
def marshal(self, o: ta.Any) -> ta.Any:
|
657
|
-
return base64.b64encode(o).decode('ascii')
|
658
|
-
|
659
|
-
def unmarshal(self, o: ta.Any) -> ta.Any:
|
660
|
-
return self.ty(base64.b64decode(o))
|
661
|
-
|
662
|
-
|
663
|
-
@dc.dataclass(frozen=True)
|
664
|
-
class EnumObjMarshaler(ObjMarshaler):
|
665
|
-
ty: type
|
666
|
-
|
667
|
-
def marshal(self, o: ta.Any) -> ta.Any:
|
668
|
-
return o.name
|
669
|
-
|
670
|
-
def unmarshal(self, o: ta.Any) -> ta.Any:
|
671
|
-
return self.ty.__members__[o] # type: ignore
|
672
|
-
|
673
|
-
|
674
|
-
@dc.dataclass(frozen=True)
|
675
|
-
class OptionalObjMarshaler(ObjMarshaler):
|
676
|
-
item: ObjMarshaler
|
677
|
-
|
678
|
-
def marshal(self, o: ta.Any) -> ta.Any:
|
679
|
-
if o is None:
|
680
|
-
return None
|
681
|
-
return self.item.marshal(o)
|
682
|
-
|
683
|
-
def unmarshal(self, o: ta.Any) -> ta.Any:
|
684
|
-
if o is None:
|
685
|
-
return None
|
686
|
-
return self.item.unmarshal(o)
|
687
|
-
|
688
|
-
|
689
|
-
@dc.dataclass(frozen=True)
|
690
|
-
class MappingObjMarshaler(ObjMarshaler):
|
691
|
-
ty: type
|
692
|
-
km: ObjMarshaler
|
693
|
-
vm: ObjMarshaler
|
694
|
-
|
695
|
-
def marshal(self, o: ta.Any) -> ta.Any:
|
696
|
-
return {self.km.marshal(k): self.vm.marshal(v) for k, v in o.items()}
|
697
|
-
|
698
|
-
def unmarshal(self, o: ta.Any) -> ta.Any:
|
699
|
-
return self.ty((self.km.unmarshal(k), self.vm.unmarshal(v)) for k, v in o.items())
|
700
|
-
|
701
|
-
|
702
|
-
@dc.dataclass(frozen=True)
|
703
|
-
class IterableObjMarshaler(ObjMarshaler):
|
704
|
-
ty: type
|
705
|
-
item: ObjMarshaler
|
706
|
-
|
707
|
-
def marshal(self, o: ta.Any) -> ta.Any:
|
708
|
-
return [self.item.marshal(e) for e in o]
|
709
|
-
|
710
|
-
def unmarshal(self, o: ta.Any) -> ta.Any:
|
711
|
-
return self.ty(self.item.unmarshal(e) for e in o)
|
712
|
-
|
713
|
-
|
714
|
-
@dc.dataclass(frozen=True)
|
715
|
-
class DataclassObjMarshaler(ObjMarshaler):
|
716
|
-
ty: type
|
717
|
-
fs: ta.Mapping[str, ObjMarshaler]
|
718
|
-
nonstrict: bool = False
|
719
|
-
|
720
|
-
def marshal(self, o: ta.Any) -> ta.Any:
|
721
|
-
return {k: m.marshal(getattr(o, k)) for k, m in self.fs.items()}
|
722
|
-
|
723
|
-
def unmarshal(self, o: ta.Any) -> ta.Any:
|
724
|
-
return self.ty(**{k: self.fs[k].unmarshal(v) for k, v in o.items() if not self.nonstrict or k in self.fs})
|
725
|
-
|
726
|
-
|
727
|
-
@dc.dataclass(frozen=True)
|
728
|
-
class PolymorphicObjMarshaler(ObjMarshaler):
|
729
|
-
class Impl(ta.NamedTuple):
|
730
|
-
ty: type
|
731
|
-
tag: str
|
732
|
-
m: ObjMarshaler
|
733
|
-
|
734
|
-
impls_by_ty: ta.Mapping[type, Impl]
|
735
|
-
impls_by_tag: ta.Mapping[str, Impl]
|
736
|
-
|
737
|
-
@classmethod
|
738
|
-
def of(cls, impls: ta.Iterable[Impl]) -> 'PolymorphicObjMarshaler':
|
739
|
-
return cls(
|
740
|
-
{i.ty: i for i in impls},
|
741
|
-
{i.tag: i for i in impls},
|
742
|
-
)
|
743
|
-
|
744
|
-
def marshal(self, o: ta.Any) -> ta.Any:
|
745
|
-
impl = self.impls_by_ty[type(o)]
|
746
|
-
return {impl.tag: impl.m.marshal(o)}
|
747
|
-
|
748
|
-
def unmarshal(self, o: ta.Any) -> ta.Any:
|
749
|
-
[(t, v)] = o.items()
|
750
|
-
impl = self.impls_by_tag[t]
|
751
|
-
return impl.m.unmarshal(v)
|
752
|
-
|
753
|
-
|
754
|
-
@dc.dataclass(frozen=True)
|
755
|
-
class DatetimeObjMarshaler(ObjMarshaler):
|
756
|
-
ty: type
|
757
|
-
|
758
|
-
def marshal(self, o: ta.Any) -> ta.Any:
|
759
|
-
return o.isoformat()
|
760
|
-
|
761
|
-
def unmarshal(self, o: ta.Any) -> ta.Any:
|
762
|
-
return self.ty.fromisoformat(o) # type: ignore
|
763
|
-
|
764
|
-
|
765
|
-
class DecimalObjMarshaler(ObjMarshaler):
|
766
|
-
def marshal(self, o: ta.Any) -> ta.Any:
|
767
|
-
return str(check_isinstance(o, decimal.Decimal))
|
768
|
-
|
769
|
-
def unmarshal(self, v: ta.Any) -> ta.Any:
|
770
|
-
return decimal.Decimal(check_isinstance(v, str))
|
771
|
-
|
772
|
-
|
773
|
-
class FractionObjMarshaler(ObjMarshaler):
|
774
|
-
def marshal(self, o: ta.Any) -> ta.Any:
|
775
|
-
fr = check_isinstance(o, fractions.Fraction)
|
776
|
-
return [fr.numerator, fr.denominator]
|
777
|
-
|
778
|
-
def unmarshal(self, v: ta.Any) -> ta.Any:
|
779
|
-
num, denom = check_isinstance(v, list)
|
780
|
-
return fractions.Fraction(num, denom)
|
781
|
-
|
782
|
-
|
783
|
-
class UuidObjMarshaler(ObjMarshaler):
|
784
|
-
def marshal(self, o: ta.Any) -> ta.Any:
|
785
|
-
return str(o)
|
786
|
-
|
787
|
-
def unmarshal(self, o: ta.Any) -> ta.Any:
|
788
|
-
return uuid.UUID(o)
|
789
|
-
|
790
|
-
|
791
|
-
##
|
792
|
-
|
793
|
-
|
794
|
-
_DEFAULT_OBJ_MARSHALERS: ta.Dict[ta.Any, ObjMarshaler] = {
|
795
|
-
**{t: NopObjMarshaler() for t in (type(None),)},
|
796
|
-
**{t: CastObjMarshaler(t) for t in (int, float, str, bool)},
|
797
|
-
**{t: Base64ObjMarshaler(t) for t in (bytes, bytearray)},
|
798
|
-
**{t: IterableObjMarshaler(t, DynamicObjMarshaler()) for t in (list, tuple, set, frozenset)},
|
799
|
-
**{t: MappingObjMarshaler(t, DynamicObjMarshaler(), DynamicObjMarshaler()) for t in (dict,)},
|
800
|
-
|
801
|
-
ta.Any: DynamicObjMarshaler(),
|
802
|
-
|
803
|
-
**{t: DatetimeObjMarshaler(t) for t in (datetime.date, datetime.time, datetime.datetime)},
|
804
|
-
decimal.Decimal: DecimalObjMarshaler(),
|
805
|
-
fractions.Fraction: FractionObjMarshaler(),
|
806
|
-
uuid.UUID: UuidObjMarshaler(),
|
807
|
-
}
|
808
|
-
|
809
|
-
_OBJ_MARSHALER_GENERIC_MAPPING_TYPES: ta.Dict[ta.Any, type] = {
|
810
|
-
**{t: t for t in (dict,)},
|
811
|
-
**{t: dict for t in (collections.abc.Mapping, collections.abc.MutableMapping)},
|
812
|
-
}
|
813
|
-
|
814
|
-
_OBJ_MARSHALER_GENERIC_ITERABLE_TYPES: ta.Dict[ta.Any, type] = {
|
815
|
-
**{t: t for t in (list, tuple, set, frozenset)},
|
816
|
-
collections.abc.Set: frozenset,
|
817
|
-
collections.abc.MutableSet: set,
|
818
|
-
collections.abc.Sequence: tuple,
|
819
|
-
collections.abc.MutableSequence: list,
|
820
|
-
}
|
821
|
-
|
822
|
-
|
823
|
-
def _make_obj_marshaler(
|
824
|
-
ty: ta.Any,
|
825
|
-
rec: ta.Callable[[ta.Any], ObjMarshaler],
|
826
|
-
*,
|
827
|
-
nonstrict_dataclasses: bool = False,
|
828
|
-
) -> ObjMarshaler:
|
829
|
-
if isinstance(ty, type):
|
830
|
-
if abc.ABC in ty.__bases__:
|
831
|
-
return PolymorphicObjMarshaler.of([ # type: ignore
|
832
|
-
PolymorphicObjMarshaler.Impl(
|
833
|
-
ity,
|
834
|
-
ity.__qualname__,
|
835
|
-
rec(ity),
|
836
|
-
)
|
837
|
-
for ity in deep_subclasses(ty)
|
838
|
-
if abc.ABC not in ity.__bases__
|
839
|
-
])
|
840
|
-
|
841
|
-
if issubclass(ty, enum.Enum):
|
842
|
-
return EnumObjMarshaler(ty)
|
843
|
-
|
844
|
-
if dc.is_dataclass(ty):
|
845
|
-
return DataclassObjMarshaler(
|
846
|
-
ty,
|
847
|
-
{f.name: rec(f.type) for f in dc.fields(ty)},
|
848
|
-
nonstrict=nonstrict_dataclasses,
|
849
|
-
)
|
850
|
-
|
851
|
-
if is_generic_alias(ty):
|
852
|
-
try:
|
853
|
-
mt = _OBJ_MARSHALER_GENERIC_MAPPING_TYPES[ta.get_origin(ty)]
|
854
|
-
except KeyError:
|
855
|
-
pass
|
856
|
-
else:
|
857
|
-
k, v = ta.get_args(ty)
|
858
|
-
return MappingObjMarshaler(mt, rec(k), rec(v))
|
859
|
-
|
860
|
-
try:
|
861
|
-
st = _OBJ_MARSHALER_GENERIC_ITERABLE_TYPES[ta.get_origin(ty)]
|
862
|
-
except KeyError:
|
863
|
-
pass
|
864
|
-
else:
|
865
|
-
[e] = ta.get_args(ty)
|
866
|
-
return IterableObjMarshaler(st, rec(e))
|
867
|
-
|
868
|
-
if is_union_alias(ty):
|
869
|
-
return OptionalObjMarshaler(rec(get_optional_alias_arg(ty)))
|
870
|
-
|
871
|
-
raise TypeError(ty)
|
872
|
-
|
873
|
-
|
874
|
-
##
|
875
|
-
|
876
|
-
|
877
|
-
_OBJ_MARSHALERS_LOCK = threading.RLock()
|
878
|
-
|
879
|
-
_OBJ_MARSHALERS: ta.Dict[ta.Any, ObjMarshaler] = dict(_DEFAULT_OBJ_MARSHALERS)
|
880
|
-
|
881
|
-
_OBJ_MARSHALER_PROXIES: ta.Dict[ta.Any, ProxyObjMarshaler] = {}
|
882
|
-
|
883
|
-
|
884
|
-
def register_opj_marshaler(ty: ta.Any, m: ObjMarshaler) -> None:
|
885
|
-
with _OBJ_MARSHALERS_LOCK:
|
886
|
-
if ty in _OBJ_MARSHALERS:
|
887
|
-
raise KeyError(ty)
|
888
|
-
_OBJ_MARSHALERS[ty] = m
|
889
|
-
|
890
|
-
|
891
|
-
def get_obj_marshaler(
|
892
|
-
ty: ta.Any,
|
893
|
-
*,
|
894
|
-
no_cache: bool = False,
|
895
|
-
**kwargs: ta.Any,
|
896
|
-
) -> ObjMarshaler:
|
897
|
-
with _OBJ_MARSHALERS_LOCK:
|
898
|
-
if not no_cache:
|
899
|
-
try:
|
900
|
-
return _OBJ_MARSHALERS[ty]
|
901
|
-
except KeyError:
|
902
|
-
pass
|
903
|
-
|
904
|
-
try:
|
905
|
-
return _OBJ_MARSHALER_PROXIES[ty]
|
906
|
-
except KeyError:
|
907
|
-
pass
|
908
|
-
|
909
|
-
rec = functools.partial(
|
910
|
-
get_obj_marshaler,
|
911
|
-
no_cache=no_cache,
|
912
|
-
**kwargs,
|
913
|
-
)
|
914
|
-
|
915
|
-
p = ProxyObjMarshaler()
|
916
|
-
_OBJ_MARSHALER_PROXIES[ty] = p
|
917
|
-
try:
|
918
|
-
m = _make_obj_marshaler(ty, rec, **kwargs)
|
919
|
-
finally:
|
920
|
-
del _OBJ_MARSHALER_PROXIES[ty]
|
921
|
-
p.m = m
|
922
|
-
|
923
|
-
if not no_cache:
|
924
|
-
_OBJ_MARSHALERS[ty] = m
|
925
|
-
return m
|
926
|
-
|
927
|
-
|
928
|
-
##
|
929
|
-
|
930
|
-
|
931
|
-
def marshal_obj(o: ta.Any, ty: ta.Any = None) -> ta.Any:
|
932
|
-
return get_obj_marshaler(ty if ty is not None else type(o)).marshal(o)
|
933
|
-
|
934
|
-
|
935
|
-
def unmarshal_obj(o: ta.Any, ty: ta.Union[ta.Type[T], ta.Any]) -> T:
|
936
|
-
return get_obj_marshaler(ty).unmarshal(o)
|
937
|
-
|
938
|
-
|
939
|
-
########################################
|
940
|
-
# ../../../../../omlish/lite/runtime.py
|
941
|
-
|
942
|
-
|
943
|
-
@cached_nullary
|
944
|
-
def is_debugger_attached() -> bool:
|
945
|
-
return any(frame[1].endswith('pydevd.py') for frame in inspect.stack())
|
946
|
-
|
947
|
-
|
948
|
-
REQUIRED_PYTHON_VERSION = (3, 8)
|
949
|
-
|
950
|
-
|
951
|
-
def check_runtime_version() -> None:
|
952
|
-
if sys.version_info < REQUIRED_PYTHON_VERSION:
|
953
|
-
raise OSError(f'Requires python {REQUIRED_PYTHON_VERSION}, got {sys.version_info} from {sys.executable}') # noqa
|
954
|
-
|
955
|
-
|
956
|
-
########################################
|
957
|
-
# ../../../../../omlish/lite/subprocesses.py
|
958
|
-
|
959
|
-
|
960
|
-
##
|
961
|
-
|
962
|
-
|
963
|
-
_SUBPROCESS_SHELL_WRAP_EXECS = False
|
964
|
-
|
965
|
-
|
966
|
-
def subprocess_shell_wrap_exec(*args: str) -> ta.Tuple[str, ...]:
|
967
|
-
return ('sh', '-c', ' '.join(map(shlex.quote, args)))
|
968
|
-
|
969
|
-
|
970
|
-
def subprocess_maybe_shell_wrap_exec(*args: str) -> ta.Tuple[str, ...]:
    """Shell-wrap *args* when globally enabled or a debugger is attached; otherwise pass through unchanged."""
    if not (_SUBPROCESS_SHELL_WRAP_EXECS or is_debugger_attached()):
        return args
    return subprocess_shell_wrap_exec(*args)
|
975
|
-
|
976
|
-
|
977
|
-
def _prepare_subprocess_invocation(
        *args: str,
        env: ta.Optional[ta.Mapping[str, ta.Any]] = None,
        extra_env: ta.Optional[ta.Mapping[str, ta.Any]] = None,
        quiet: bool = False,
        shell: bool = False,
        **kwargs: ta.Any,
) -> ta.Tuple[ta.Tuple[ta.Any, ...], ta.Dict[str, ta.Any]]:
    """Normalize an (args, kwargs) pair for handing to the ``subprocess`` module.

    - overlays ``extra_env`` on top of ``env`` (defaulting to ``os.environ``)
    - for ``quiet`` calls, sends stderr to DEVNULL unless DEBUG logging is on
    - shell-wraps non-shell invocations when debugging (via subprocess_maybe_shell_wrap_exec)
    """
    log.debug(args)
    if extra_env:
        log.debug(extra_env)
        env = {**(env if env is not None else os.environ), **extra_env}

    if quiet and 'stderr' not in kwargs and not log.isEnabledFor(logging.DEBUG):
        kwargs['stderr'] = subprocess.DEVNULL

    if not shell:
        args = subprocess_maybe_shell_wrap_exec(*args)

    return args, dict(env=env, shell=shell, **kwargs)
|
1004
|
-
|
1005
|
-
|
1006
|
-
def subprocess_check_call(*args: str, stdout=sys.stderr, **kwargs: ta.Any) -> None:
    """Run a command via ``subprocess.check_call``, defaulting its stdout to stderr."""
    call_args, call_kwargs = _prepare_subprocess_invocation(*args, stdout=stdout, **kwargs)
    return subprocess.check_call(call_args, **call_kwargs)  # type: ignore
|
1009
|
-
|
1010
|
-
|
1011
|
-
def subprocess_check_output(*args: str, **kwargs: ta.Any) -> bytes:
    """Run a command and return its captured stdout as bytes, raising on failure."""
    out_args, out_kwargs = _prepare_subprocess_invocation(*args, **kwargs)
    return subprocess.check_output(out_args, **out_kwargs)
|
1014
|
-
|
1015
|
-
|
1016
|
-
def subprocess_check_output_str(*args: str, **kwargs: ta.Any) -> str:
    """Run a command and return its stdout decoded (default encoding) and whitespace-stripped."""
    raw = subprocess_check_output(*args, **kwargs)
    return raw.decode().strip()
|
1018
|
-
|
1019
|
-
|
1020
|
-
##
|
1021
|
-
|
1022
|
-
|
1023
|
-
# Exception types the `subprocess_try_*` helpers treat as an expected, non-fatal
# failure: the executable is missing, or the command exited non-zero.
DEFAULT_SUBPROCESS_TRY_EXCEPTIONS: ta.Tuple[ta.Type[Exception], ...] = (
    FileNotFoundError,
    subprocess.CalledProcessError,
)
|
1027
|
-
|
1028
|
-
|
1029
|
-
def subprocess_try_call(
        *args: str,
        try_exceptions: ta.Tuple[ta.Type[Exception], ...] = DEFAULT_SUBPROCESS_TRY_EXCEPTIONS,
        **kwargs: ta.Any,
) -> bool:
    """Run a command, returning True on success and False on any of *try_exceptions*.

    The swallowed failure is logged (with traceback) only at DEBUG level.
    """
    try:
        subprocess_check_call(*args, **kwargs)
    except try_exceptions as e:  # noqa
        if log.isEnabledFor(logging.DEBUG):
            log.exception('command failed')
        return False
    return True
|
1042
|
-
|
1043
|
-
|
1044
|
-
def subprocess_try_output(
        *args: str,
        try_exceptions: ta.Tuple[ta.Type[Exception], ...] = DEFAULT_SUBPROCESS_TRY_EXCEPTIONS,
        **kwargs: ta.Any,
) -> ta.Optional[bytes]:
    """Run a command and return its stdout bytes, or None on any of *try_exceptions*.

    The swallowed failure is logged (with traceback) only at DEBUG level.
    """
    try:
        out = subprocess_check_output(*args, **kwargs)
    except try_exceptions as e:  # noqa
        if log.isEnabledFor(logging.DEBUG):
            log.exception('command failed')
        return None
    return out
|
1055
|
-
|
1056
|
-
|
1057
|
-
def subprocess_try_output_str(*args: str, **kwargs: ta.Any) -> ta.Optional[str]:
    """Like ``subprocess_try_output``, but decoding and stripping the output; None on failure."""
    out = subprocess_try_output(*args, **kwargs)
    if out is None:
        return None
    return out.decode().strip()
|
1060
|
-
|
1061
|
-
|
1062
|
-
##
|
1063
|
-
|
1064
|
-
|
1065
|
-
def subprocess_close(
        proc: subprocess.Popen,
        timeout: ta.Optional[float] = None,
) -> None:
    """Close any open stdio pipes of *proc*, then wait for it to exit.

    Raises subprocess.TimeoutExpired if the process is still running after *timeout*.
    """
    # TODO: terminate, sleep, kill
    for pipe in (proc.stdout, proc.stderr, proc.stdin):
        if pipe:
            pipe.close()

    proc.wait(timeout)
|
1078
|
-
|
1079
|
-
|
1080
|
-
########################################
|
1081
|
-
# ../base.py
|
1082
|
-
|
1083
|
-
|
1084
|
-
##
|
1085
|
-
|
1086
|
-
|
1087
|
-
class Phase(enum.Enum):
    """Ordered stages of a deployment; Deployment.deploy() iterates them in definition order."""

    HOST = enum.auto()
    ENV = enum.auto()
    BACKEND = enum.auto()
    FRONTEND = enum.auto()
    START_BACKEND = enum.auto()
    START_FRONTEND = enum.auto()


def run_in_phase(*ps: Phase):
    """Decorator factory: tag a Concern method with the phases it should run in.

    The tag is stored on the function as ``__deployment_phases__`` and picked up
    by ``Concern.__init_subclass__``; the function itself is returned unchanged.
    """
    def inner(fn):
        fn.__deployment_phases__ = ps
        return fn
    return inner
|
1101
|
-
|
1102
|
-
|
1103
|
-
class Concern(abc.ABC):
    """Base class for a single deployment concern (dirs, nginx, repo, venv, ...).

    Subclasses tag methods with ``@run_in_phase(...)``; ``__init_subclass__``
    collects those tagged methods into ``_phase_fns`` so that ``run_phase``
    can dispatch every method registered for a given ``Phase``.
    """

    def __init__(self, d: 'Deployment') -> None:
        super().__init__()
        # Owning Deployment, used for config, home_dir(), and shell helpers.
        self._d = d

    # Per-subclass map of Phase -> tagged methods, built by __init_subclass__.
    _phase_fns: ta.ClassVar[ta.Mapping[Phase, ta.Sequence[ta.Callable]]]

    def __init_subclass__(cls, **kwargs):
        super().__init_subclass__(**kwargs)
        dct: ta.Dict[Phase, ta.List[ta.Callable]] = {}
        # Scan all non-dunder attributes for functions carrying the
        # __deployment_phases__ tag set by @run_in_phase. getattr(cls, a, None)
        # tolerates attributes that fail to resolve; `if ps` drops untagged ones.
        # Note: dir() yields names alphabetically, so within a phase the methods
        # run in name order, not declaration order.
        for fn, ps in [
            (v, ps)
            for a in dir(cls)
            if not (a.startswith('__') and a.endswith('__'))
            for v in [getattr(cls, a, None)]
            for ps in [getattr(v, '__deployment_phases__', None)]
            if ps
        ]:
            dct.update({p: [*dct.get(p, []), fn] for p in ps})
        cls._phase_fns = dct

    @dc.dataclass(frozen=True)
    class Output(abc.ABC):
        # Filesystem artifact produced by a concern: its path, and whether it
        # is a file (as opposed to a directory).
        path: str
        is_file: bool

    def outputs(self) -> ta.Sequence[Output]:
        """Artifacts this concern produces; default is none."""
        return ()

    def run_phase(self, p: Phase) -> None:
        """Invoke every method of this concern registered for phase *p* (no-op if none)."""
        for fn in self._phase_fns.get(p, ()):
            # Bind the plain function to this instance before calling.
            fn.__get__(self, type(self))()
|
1135
|
-
|
1136
|
-
|
1137
|
-
##
|
1138
|
-
|
1139
|
-
|
1140
|
-
class Deployment:
    """Drives a full deploy: runs each Phase, in order, across an ordered list of Concerns."""

    def __init__(
            self,
            cfg: DeployConfig,
            concern_cls_list: ta.List[ta.Type[Concern]],
            host_cfg: HostConfig = HostConfig(),
    ) -> None:
        super().__init__()
        self._cfg = cfg
        self._host_cfg = host_cfg

        # Concerns are instantiated eagerly, preserving the given order.
        self._concerns: ta.List[Concern] = [concern_cls(self) for concern_cls in concern_cls_list]

    @property
    def cfg(self) -> DeployConfig:
        return self._cfg

    @property
    def host_cfg(self) -> HostConfig:
        return self._host_cfg

    def sh(self, *ss: str) -> None:
        """Join *ss* with ' && ' and run the result as a single shell command (logged at INFO)."""
        joined = ' && '.join(ss)
        log.info('Executing: %s', joined)
        subprocess_check_call(joined, shell=True)

    def ush(self, *ss: str) -> None:
        """Run *ss* as the configured deploy user via `su - <username> -c '...'`."""
        joined = ' && '.join(ss)
        self.sh(f'su - {self._host_cfg.username} -c {shlex.quote(joined)}')

    @cached_nullary
    def home_dir(self) -> str:
        """Expanded home directory of the configured deploy user (cached)."""
        return os.path.expanduser(f'~{self._host_cfg.username}')

    @cached_nullary
    def deploy(self) -> None:
        """Run every concern through every phase, phases outermost (at most once per instance)."""
        for phase in Phase:
            log.info('Phase %s', phase.name)
            for concern in self._concerns:
                concern.run_phase(phase)

        log.info('Shitty deploy complete!')
|
1183
|
-
|
1184
|
-
|
1185
|
-
########################################
|
1186
|
-
# ../concerns/dirs.py
|
1187
|
-
|
1188
|
-
|
1189
|
-
class DirsConcern(Concern):
    """Creates the per-app directory skeleton under the deploy user's home, owned by that user."""

    @run_in_phase(Phase.HOST)
    def create_dirs(self) -> None:
        pwn = pwd.getpwnam(self._d.host_cfg.username)

        subdirs = (
            'app',
            'conf',
            'conf/env',
            'conf/nginx',
            'conf/supervisor',
            'venv',
        )
        for dn in subdirs:
            fp = os.path.join(self._d.home_dir(), dn)
            if os.path.exists(fp):
                continue
            log.info('Creating directory: %s', fp)
            os.mkdir(fp)
            # chown to the deploy user so later unprivileged steps can write here.
            os.chown(fp, pwn.pw_uid, pwn.pw_gid)
|
1207
|
-
|
1208
|
-
|
1209
|
-
########################################
|
1210
|
-
# ../concerns/nginx.py
|
1211
|
-
"""
|
1212
|
-
TODO:
|
1213
|
-
- https://stackoverflow.com/questions/3011067/restart-nginx-without-sudo
|
1214
|
-
"""
|
1215
|
-
|
1216
|
-
|
1217
|
-
class GlobalNginxConcern(Concern):
    """Points the system-wide nginx conf at the per-user nginx conf directory (first run only)."""

    @run_in_phase(Phase.HOST)
    def create_global_nginx_conf(self) -> None:
        nginx_conf_dir = os.path.join(self._d.home_dir(), 'conf/nginx')
        conf_path = self._d.host_cfg.global_nginx_conf_file_path
        # Never overwrite an existing global conf.
        if os.path.isfile(conf_path):
            return
        log.info('Writing global nginx conf at %s', conf_path)
        with open(conf_path, 'w') as f:
            f.write(f'include {nginx_conf_dir}/*.conf;\n')
|
1225
|
-
|
1226
|
-
|
1227
|
-
class NginxConcern(Concern):
    """Writes the app's nginx site conf and (re)starts/reloads nginx."""

    @run_in_phase(Phase.FRONTEND)
    def create_nginx_conf(self) -> None:
        # NOTE(review): upstream port 8000 is hard-coded here — presumably the
        # app's expected bind port; confirm against the app entrypoint.
        conf_text = textwrap.dedent(f"""
            server {{
                listen 80;
                location / {{
                    proxy_pass http://127.0.0.1:8000/;
                }}
            }}
        """)
        conf_path = os.path.join(self._d.home_dir(), f'conf/nginx/{self._d.cfg.app_name}.conf')
        log.info('Writing nginx conf to %s', conf_path)
        with open(conf_path, 'w') as f:
            f.write(conf_text)

    @run_in_phase(Phase.START_FRONTEND)
    def poke_nginx(self) -> None:
        log.info('Starting nginx')
        self._d.sh('service nginx start')

        log.info('Poking nginx')
        self._d.sh('nginx -s reload')
|
1250
|
-
|
1251
|
-
|
1252
|
-
########################################
|
1253
|
-
# ../concerns/repo.py
|
1254
|
-
|
1255
|
-
|
1256
|
-
class RepoConcern(Concern):
    """Shallow-clones the application repository into ~/app as the deploy user."""

    @run_in_phase(Phase.ENV)
    def clone_repo(self) -> None:
        # Submodule support is wired up but currently disabled.
        clone_submodules = False

        cmds = [
            'cd ~/app',
            f'git clone --depth 1 {self._d.cfg.repo_url} {self._d.cfg.app_name}',
        ]
        if clone_submodules:
            cmds.append(f'cd {self._d.cfg.app_name}')
            cmds.append('git submodule update --init')
        self._d.ush(*cmds)
|
1268
|
-
|
1269
|
-
|
1270
|
-
########################################
|
1271
|
-
# ../concerns/supervisor.py
|
1272
|
-
|
1273
|
-
|
1274
|
-
class GlobalSupervisorConcern(Concern):
    """Appends an [include] of the per-user supervisor conf dir to the global supervisor conf."""

    @run_in_phase(Phase.HOST)
    def create_global_supervisor_conf(self) -> None:
        sup_conf_dir = os.path.join(self._d.home_dir(), 'conf/supervisor')
        conf_path = self._d.host_cfg.global_supervisor_conf_file_path
        with open(conf_path) as f:
            glo_sup_conf = f.read()
        # Idempotence check: only append the include once.
        if sup_conf_dir in glo_sup_conf:
            return
        log.info('Updating global supervisor conf at %s', conf_path)  # noqa
        glo_sup_conf += textwrap.dedent(f"""
            [include]
            files = {self._d.home_dir()}/conf/supervisor/*.conf
        """)
        with open(conf_path, 'w') as f:
            f.write(glo_sup_conf)
|
1288
|
-
|
1289
|
-
|
1290
|
-
class SupervisorConcern(Concern):
    """Writes the app's supervisor [program] conf and pokes supervisor to reload it."""

    @run_in_phase(Phase.BACKEND)
    def create_supervisor_conf(self) -> None:
        conf_text = textwrap.dedent(f"""
            [program:{self._d.cfg.app_name}]
            command={self._d.home_dir()}/venv/{self._d.cfg.app_name}/bin/python -m {self._d.cfg.entrypoint}
            directory={self._d.home_dir()}/app/{self._d.cfg.app_name}
            user={self._d.host_cfg.username}
            autostart=true
            autorestart=true
        """)
        conf_path = os.path.join(self._d.home_dir(), f'conf/supervisor/{self._d.cfg.app_name}.conf')
        log.info('Writing supervisor conf to %s', conf_path)
        with open(conf_path, 'w') as f:
            f.write(conf_text)

    @run_in_phase(Phase.START_BACKEND)
    def poke_supervisor(self) -> None:
        # Assumes supervisor runs as PID 1 (e.g. in a container); SIGHUP makes
        # it reread its config — TODO confirm for non-container hosts.
        log.info('Poking supervisor')
        self._d.sh('kill -HUP 1')
|
1310
|
-
|
1311
|
-
|
1312
|
-
########################################
|
1313
|
-
# ../concerns/user.py
|
1314
|
-
|
1315
|
-
|
1316
|
-
class UserConcern(Concern):
    """Ensures the deploy system user exists, creating it if missing."""

    @run_in_phase(Phase.HOST)
    def create_user(self) -> None:
        username = self._d.host_cfg.username
        try:
            pwd.getpwnam(username)
        except KeyError:
            log.info('Creating user %s', username)
            self._d.sh(' '.join([
                'adduser',
                '--system',
                '--disabled-password',
                '--group',
                '--shell /bin/bash',
                username,
            ]))
            # Re-resolve to fail loudly if creation did not take effect.
            pwd.getpwnam(username)
|
1332
|
-
|
1333
|
-
|
1334
|
-
########################################
|
1335
|
-
# ../concerns/venv.py
|
1336
|
-
"""
|
1337
|
-
TODO:
|
1338
|
-
- use LinuxInterpResolver lol
|
1339
|
-
"""
|
1340
|
-
|
1341
|
-
|
1342
|
-
class VenvConcern(Concern):
    """Builds the app's virtualenv under ~/venv and installs its requirements."""

    @run_in_phase(Phase.ENV)
    def setup_venv(self) -> None:
        app = self._d.cfg.app_name
        self._d.ush(
            'cd ~/venv',
            f'{self._d.cfg.python_bin} -mvenv {app}',

            # https://stackoverflow.com/questions/77364550/attributeerror-module-pkgutil-has-no-attribute-impimporter-did-you-mean
            f'{app}/bin/python -m ensurepip',
            f'{app}/bin/python -mpip install --upgrade setuptools pip',

            # These commands run as host_cfg.username (via ush), so resolve the requirements file against
            # that user's own home (`~`) instead of hard-coding the 'deploy' user as before — the old
            # `~deploy/...` path broke whenever host_cfg.username was anything other than 'deploy'.
            f'{app}/bin/python -mpip install -r ~/app/{app}/{self._d.cfg.requirements_txt}',  # noqa
        )
|
1355
|
-
|
1356
|
-
|
1357
|
-
########################################
|
1358
|
-
# main.py
|
1359
|
-
|
1360
|
-
|
1361
|
-
##
|
1362
|
-
|
1363
|
-
|
1364
|
-
def _deploy_cmd(args) -> None:
    """CLI handler for 'deploy': parse the JSON config argument and run a full deployment."""
    cfg: DeployConfig = unmarshal_obj(json.loads(args.cfg), DeployConfig)

    # Concern order matters: host-level setup first, then env/backend/frontend pieces.
    concern_classes: ta.List[ta.Type[Concern]] = [
        UserConcern,
        DirsConcern,
        GlobalNginxConcern,
        GlobalSupervisorConcern,
        RepoConcern,
        VenvConcern,
        SupervisorConcern,
        NginxConcern,
    ]

    Deployment(cfg, concern_classes).deploy()
|
1381
|
-
|
1382
|
-
|
1383
|
-
##
|
1384
|
-
|
1385
|
-
|
1386
|
-
def _build_parser() -> argparse.ArgumentParser:
    """Build the CLI argument parser, with a single 'deploy <cfg>' subcommand."""
    parser = argparse.ArgumentParser()

    subparsers = parser.add_subparsers()

    deploy_parser = subparsers.add_parser('deploy')
    deploy_parser.add_argument('cfg')
    deploy_parser.set_defaults(func=_deploy_cmd)

    return parser
|
1396
|
-
|
1397
|
-
|
1398
|
-
def _main(argv: ta.Optional[ta.Sequence[str]] = None) -> None:
    """Script entrypoint: verify the runtime, set up logging, then dispatch the CLI."""
    check_runtime_version()

    # getattr instead of a direct attribute access — presumably to keep static
    # analyzers from narrowing on a sys.platform literal check (note the noqa).
    if getattr(sys, 'platform') != 'linux':  # noqa
        raise OSError('must run on linux')

    configure_standard_logging()

    parser = _build_parser()
    args = parser.parse_args(argv)

    fn = getattr(args, 'func', None)
    if fn is None:
        # No subcommand given.
        parser.print_help()
    else:
        fn(args)
|
1412
|
-
|
1413
|
-
|
1414
|
-
# Script entrypoint guard: only run the CLI when executed directly.
if __name__ == '__main__':
    _main()
|