ominfra 0.0.0.dev138__py3-none-any.whl → 0.0.0.dev139__py3-none-any.whl
This diff shows the changes between publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
- ominfra/manage/__init__.py +13 -0
- ominfra/manage/{new/main.py → main.py} +6 -6
- ominfra/{manage/new/_manage.py → scripts/manage.py} +13 -13
- {ominfra-0.0.0.dev138.dist-info → ominfra-0.0.0.dev139.dist-info}/METADATA +3 -3
- {ominfra-0.0.0.dev138.dist-info → ominfra-0.0.0.dev139.dist-info}/RECORD +12 -41
- ominfra/manage/deploy/_executor.py +0 -1415
- ominfra/manage/deploy/configs.py +0 -19
- ominfra/manage/deploy/executor/__init__.py +0 -1
- ominfra/manage/deploy/executor/base.py +0 -115
- ominfra/manage/deploy/executor/concerns/__init__.py +0 -0
- ominfra/manage/deploy/executor/concerns/dirs.py +0 -28
- ominfra/manage/deploy/executor/concerns/nginx.py +0 -47
- ominfra/manage/deploy/executor/concerns/repo.py +0 -17
- ominfra/manage/deploy/executor/concerns/supervisor.py +0 -46
- ominfra/manage/deploy/executor/concerns/systemd.py +0 -88
- ominfra/manage/deploy/executor/concerns/user.py +0 -25
- ominfra/manage/deploy/executor/concerns/venv.py +0 -22
- ominfra/manage/deploy/executor/main.py +0 -119
- ominfra/manage/deploy/poly/__init__.py +0 -1
- ominfra/manage/deploy/poly/_main.py +0 -975
- ominfra/manage/deploy/poly/base.py +0 -178
- ominfra/manage/deploy/poly/configs.py +0 -38
- ominfra/manage/deploy/poly/deploy.py +0 -25
- ominfra/manage/deploy/poly/main.py +0 -18
- ominfra/manage/deploy/poly/nginx.py +0 -60
- ominfra/manage/deploy/poly/repo.py +0 -41
- ominfra/manage/deploy/poly/runtime.py +0 -39
- ominfra/manage/deploy/poly/site.py +0 -11
- ominfra/manage/deploy/poly/supervisor.py +0 -64
- ominfra/manage/deploy/poly/venv.py +0 -52
- ominfra/manage/deploy/remote.py +0 -91
- ominfra/manage/manage.py +0 -12
- ominfra/manage/new/__init__.py +0 -1
- ominfra/manage/new/commands/__init__.py +0 -0
- /ominfra/manage/{deploy → commands}/__init__.py +0 -0
- /ominfra/manage/{new/commands → commands}/base.py +0 -0
- /ominfra/manage/{new/commands → commands}/subprocess.py +0 -0
- {ominfra-0.0.0.dev138.dist-info → ominfra-0.0.0.dev139.dist-info}/LICENSE +0 -0
- {ominfra-0.0.0.dev138.dist-info → ominfra-0.0.0.dev139.dist-info}/WHEEL +0 -0
- {ominfra-0.0.0.dev138.dist-info → ominfra-0.0.0.dev139.dist-info}/entry_points.txt +0 -0
- {ominfra-0.0.0.dev138.dist-info → ominfra-0.0.0.dev139.dist-info}/top_level.txt +0 -0
@@ -1,975 +0,0 @@
-#!/usr/bin/env python3
-# noinspection DuplicatedCode
-# @omlish-lite
-# @omlish-script
-# @omlish-amalg-output main.py
-# ruff: noqa: N802 UP006 UP007 UP036
-import abc
-import contextlib
-import dataclasses as dc
-import datetime
-import functools
-import inspect
-import json
-import logging
-import os
-import os.path
-import shlex
-import stat
-import subprocess
-import sys
-import textwrap
-import threading
-import typing as ta
-
-
-########################################
-
-
-if sys.version_info < (3, 8):
-    raise OSError(f'Requires python (3, 8), got {sys.version_info} from {sys.executable}')  # noqa
-
-
-########################################
-
-
-# ../../../../omlish/lite/cached.py
-T = ta.TypeVar('T')
-
-# base.py
-ConcernT = ta.TypeVar('ConcernT')
-ConfigT = ta.TypeVar('ConfigT')
-SiteConcernT = ta.TypeVar('SiteConcernT', bound='SiteConcern')
-SiteConcernConfigT = ta.TypeVar('SiteConcernConfigT', bound='SiteConcernConfig')
-DeployConcernT = ta.TypeVar('DeployConcernT', bound='DeployConcern')
-DeployConcernConfigT = ta.TypeVar('DeployConcernConfigT', bound='DeployConcernConfig')
-
-
-########################################
-# ../configs.py
-
-
-##
-
-
-@dc.dataclass(frozen=True)
-class SiteConcernConfig(abc.ABC):  # noqa
-    pass
-
-
-@dc.dataclass(frozen=True)
-class SiteConfig:
-    user = 'omlish'
-
-    root_dir: str = '~/deploy'
-
-    concerns: ta.List[SiteConcernConfig] = dc.field(default_factory=list)
-
-
-##
-
-
-@dc.dataclass(frozen=True)
-class DeployConcernConfig(abc.ABC):  # noqa
-    pass
-
-
-@dc.dataclass(frozen=True)
-class DeployConfig:
-    site: SiteConfig
-
-    name: str
-
-    concerns: ta.List[DeployConcernConfig] = dc.field(default_factory=list)
-
-
-########################################
-# ../../../../../omlish/lite/cached.py
-
-
-class _cached_nullary:  # noqa
-    def __init__(self, fn):
-        super().__init__()
-        self._fn = fn
-        self._value = self._missing = object()
-        functools.update_wrapper(self, fn)
-
-    def __call__(self, *args, **kwargs):  # noqa
-        if self._value is self._missing:
-            self._value = self._fn()
-        return self._value
-
-    def __get__(self, instance, owner):  # noqa
-        bound = instance.__dict__[self._fn.__name__] = self.__class__(self._fn.__get__(instance, owner))
-        return bound
-
-
-def cached_nullary(fn):  # ta.Callable[..., T]) -> ta.Callable[..., T]:
-    return _cached_nullary(fn)
-
-
-########################################
-# ../../../../../omlish/lite/json.py
-
-
-##
-
-
-JSON_PRETTY_INDENT = 2
-
-JSON_PRETTY_KWARGS: ta.Mapping[str, ta.Any] = dict(
-    indent=JSON_PRETTY_INDENT,
-)
-
-json_dump_pretty: ta.Callable[..., bytes] = functools.partial(json.dump, **JSON_PRETTY_KWARGS)  # type: ignore
-json_dumps_pretty: ta.Callable[..., str] = functools.partial(json.dumps, **JSON_PRETTY_KWARGS)
-
-
-##
-
-
-JSON_COMPACT_SEPARATORS = (',', ':')
-
-JSON_COMPACT_KWARGS: ta.Mapping[str, ta.Any] = dict(
-    indent=None,
-    separators=JSON_COMPACT_SEPARATORS,
-)
-
-json_dump_compact: ta.Callable[..., bytes] = functools.partial(json.dump, **JSON_COMPACT_KWARGS)  # type: ignore
-json_dumps_compact: ta.Callable[..., str] = functools.partial(json.dumps, **JSON_COMPACT_KWARGS)
-
-
-########################################
-# ../base.py
-
-
-##
-
-
-@dc.dataclass(frozen=True)
-class FsItem(abc.ABC):
-    path: str
-
-    @property
-    @abc.abstractmethod
-    def is_dir(self) -> bool:
-        raise NotImplementedError
-
-
-@dc.dataclass(frozen=True)
-class FsFile(FsItem):
-    @property
-    def is_dir(self) -> bool:
-        return False
-
-
-@dc.dataclass(frozen=True)
-class FsDir(FsItem):
-    @property
-    def is_dir(self) -> bool:
-        return True
-
-
-##
-
-
-class Runtime(abc.ABC):
-    class Stat(ta.NamedTuple):
-        path: str
-        is_dir: bool
-
-    @abc.abstractmethod
-    def stat(self, p: str) -> ta.Optional[Stat]:
-        raise NotImplementedError
-
-    @abc.abstractmethod
-    def make_dirs(self, p: str, exist_ok: bool = False) -> None:
-        raise NotImplementedError
-
-    @abc.abstractmethod
-    def write_file(self, p: str, c: ta.Union[str, bytes]) -> None:
-        raise NotImplementedError
-
-    @abc.abstractmethod
-    def sh(self, *ss: str) -> None:
-        raise NotImplementedError
-
-
-##
-
-
-class ConcernsContainer(abc.ABC, ta.Generic[ConcernT, ConfigT]):
-    concern_cls: ta.ClassVar[type]
-
-    def __init__(
-            self,
-            config: ConfigT,
-    ) -> None:
-        super().__init__()
-        self._config = config
-
-        concern_cls_dct = self._concern_cls_by_config_cls()
-        self._concerns = [
-            concern_cls_dct[type(c)](c, self)  # type: ignore
-            for c in config.concerns  # type: ignore
-        ]
-        self._concerns_by_cls: ta.Dict[ta.Type[ConcernT], ConcernT] = {}
-        for c in self._concerns:
-            if type(c) in self._concerns_by_cls:
-                raise TypeError(f'Duplicate concern type: {c}')
-            self._concerns_by_cls[type(c)] = c
-
-    @classmethod
-    def _concern_cls_by_config_cls(cls) -> ta.Mapping[type, ta.Type[ConcernT]]:
-        return {  # noqa
-            c.Config: c  # type: ignore
-            for c in cls.concern_cls.__subclasses__()
-        }
-
-    @property
-    def config(self) -> ConfigT:
-        return self._config
-
-    @property
-    def concerns(self) -> ta.List[ConcernT]:
-        return self._concerns
-
-    def concern(self, cls: ta.Type[T]) -> T:
-        return self._concerns_by_cls[cls]  # type: ignore
-
-
-##
-
-
-class SiteConcern(abc.ABC, ta.Generic[SiteConcernConfigT]):
-    def __init__(self, config: SiteConcernConfigT, site: 'Site') -> None:
-        super().__init__()
-        self._config = config
-        self._site = site
-
-    @property
-    def config(self) -> SiteConcernConfigT:
-        return self._config
-
-    @abc.abstractmethod
-    def run(self, runtime: Runtime) -> None:
-        raise NotImplementedError
-
-
-##
-
-
-class Site(ConcernsContainer[SiteConcern, SiteConfig]):
-    @abc.abstractmethod
-    def run(self, runtime: Runtime) -> None:
-        raise NotImplementedError
-
-
-##
-
-
-class DeployConcern(abc.ABC, ta.Generic[DeployConcernConfigT]):
-    def __init__(self, config: DeployConcernConfigT, deploy: 'Deploy') -> None:
-        super().__init__()
-        self._config = config
-        self._deploy = deploy
-
-    @property
-    def config(self) -> DeployConcernConfigT:
-        return self._config
-
-    def fs_items(self) -> ta.Sequence[FsItem]:
-        return []
-
-    @abc.abstractmethod
-    def run(self, runtime: Runtime) -> None:
-        raise NotImplementedError
-
-
-##
-
-
-class Deploy(ConcernsContainer[DeployConcern, DeployConfig]):
-    @property
-    @abc.abstractmethod
-    def site(self) -> Site:
-        raise NotImplementedError
-
-    @abc.abstractmethod
-    def run(self, runtime: Runtime) -> None:
-        raise NotImplementedError
-
-
-########################################
-# ../../../../../omlish/lite/logs.py
-"""
-TODO:
- - translate json keys
- - debug
-"""
-
-
-log = logging.getLogger(__name__)
-
-
-##
-
-
-class TidLogFilter(logging.Filter):
-
-    def filter(self, record):
-        record.tid = threading.get_native_id()
-        return True
-
-
-##
-
-
-class JsonLogFormatter(logging.Formatter):
-
-    KEYS: ta.Mapping[str, bool] = {
-        'name': False,
-        'msg': False,
-        'args': False,
-        'levelname': False,
-        'levelno': False,
-        'pathname': False,
-        'filename': False,
-        'module': False,
-        'exc_info': True,
-        'exc_text': True,
-        'stack_info': True,
-        'lineno': False,
-        'funcName': False,
-        'created': False,
-        'msecs': False,
-        'relativeCreated': False,
-        'thread': False,
-        'threadName': False,
-        'processName': False,
-        'process': False,
-    }
-
-    def format(self, record: logging.LogRecord) -> str:
-        dct = {
-            k: v
-            for k, o in self.KEYS.items()
-            for v in [getattr(record, k)]
-            if not (o and v is None)
-        }
-        return json_dumps_compact(dct)
-
-
-##
-
-
-STANDARD_LOG_FORMAT_PARTS = [
-    ('asctime', '%(asctime)-15s'),
-    ('process', 'pid=%(process)-6s'),
-    ('thread', 'tid=%(thread)x'),
-    ('levelname', '%(levelname)s'),
-    ('name', '%(name)s'),
-    ('separator', '::'),
-    ('message', '%(message)s'),
-]
-
-
-class StandardLogFormatter(logging.Formatter):
-
-    @staticmethod
-    def build_log_format(parts: ta.Iterable[ta.Tuple[str, str]]) -> str:
-        return ' '.join(v for k, v in parts)
-
-    converter = datetime.datetime.fromtimestamp  # type: ignore
-
-    def formatTime(self, record, datefmt=None):
-        ct = self.converter(record.created)  # type: ignore
-        if datefmt:
-            return ct.strftime(datefmt)  # noqa
-        else:
-            t = ct.strftime('%Y-%m-%d %H:%M:%S')
-            return '%s.%03d' % (t, record.msecs)  # noqa
-
-
-##
-
-
-class ProxyLogFilterer(logging.Filterer):
-    def __init__(self, underlying: logging.Filterer) -> None:  # noqa
-        self._underlying = underlying
-
-    @property
-    def underlying(self) -> logging.Filterer:
-        return self._underlying
-
-    @property
-    def filters(self):
-        return self._underlying.filters
-
-    @filters.setter
-    def filters(self, filters):
-        self._underlying.filters = filters
-
-    def addFilter(self, filter):  # noqa
-        self._underlying.addFilter(filter)
-
-    def removeFilter(self, filter):  # noqa
-        self._underlying.removeFilter(filter)
-
-    def filter(self, record):
-        return self._underlying.filter(record)
-
-
-class ProxyLogHandler(ProxyLogFilterer, logging.Handler):
-    def __init__(self, underlying: logging.Handler) -> None:  # noqa
-        ProxyLogFilterer.__init__(self, underlying)
-
-    _underlying: logging.Handler
-
-    @property
-    def underlying(self) -> logging.Handler:
-        return self._underlying
-
-    def get_name(self):
-        return self._underlying.get_name()
-
-    def set_name(self, name):
-        self._underlying.set_name(name)
-
-    @property
-    def name(self):
-        return self._underlying.name
-
-    @property
-    def level(self):
-        return self._underlying.level
-
-    @level.setter
-    def level(self, level):
-        self._underlying.level = level
-
-    @property
-    def formatter(self):
-        return self._underlying.formatter
-
-    @formatter.setter
-    def formatter(self, formatter):
-        self._underlying.formatter = formatter
-
-    def createLock(self):
-        self._underlying.createLock()
-
-    def acquire(self):
-        self._underlying.acquire()
-
-    def release(self):
-        self._underlying.release()
-
-    def setLevel(self, level):
-        self._underlying.setLevel(level)
-
-    def format(self, record):
-        return self._underlying.format(record)
-
-    def emit(self, record):
-        self._underlying.emit(record)
-
-    def handle(self, record):
-        return self._underlying.handle(record)
-
-    def setFormatter(self, fmt):
-        self._underlying.setFormatter(fmt)
-
-    def flush(self):
-        self._underlying.flush()
-
-    def close(self):
-        self._underlying.close()
-
-    def handleError(self, record):
-        self._underlying.handleError(record)
-
-
-##
-
-
-class StandardLogHandler(ProxyLogHandler):
-    pass
-
-
-##
-
-
-@contextlib.contextmanager
-def _locking_logging_module_lock() -> ta.Iterator[None]:
-    if hasattr(logging, '_acquireLock'):
-        logging._acquireLock()  # noqa
-        try:
-            yield
-        finally:
-            logging._releaseLock()  # type: ignore  # noqa
-
-    elif hasattr(logging, '_lock'):
-        # https://github.com/python/cpython/commit/74723e11109a320e628898817ab449b3dad9ee96
-        with logging._lock:  # noqa
-            yield
-
-    else:
-        raise Exception("Can't find lock in logging module")
-
-
-def configure_standard_logging(
-        level: ta.Union[int, str] = logging.INFO,
-        *,
-        json: bool = False,
-        target: ta.Optional[logging.Logger] = None,
-        force: bool = False,
-        handler_factory: ta.Optional[ta.Callable[[], logging.Handler]] = None,
-) -> ta.Optional[StandardLogHandler]:
-    with _locking_logging_module_lock():
-        if target is None:
-            target = logging.root
-
-        #
-
-        if not force:
-            if any(isinstance(h, StandardLogHandler) for h in list(target.handlers)):
-                return None
-
-        #
-
-        if handler_factory is not None:
-            handler = handler_factory()
-        else:
-            handler = logging.StreamHandler()
-
-        #
-
-        formatter: logging.Formatter
-        if json:
-            formatter = JsonLogFormatter()
-        else:
-            formatter = StandardLogFormatter(StandardLogFormatter.build_log_format(STANDARD_LOG_FORMAT_PARTS))
-        handler.setFormatter(formatter)
-
-        #
-
-        handler.addFilter(TidLogFilter())
-
-        #
-
-        target.addHandler(handler)
-
-        #
-
-        if level is not None:
-            target.setLevel(level)
-
-        #
-
-        return StandardLogHandler(handler)
-
-
-########################################
-# ../../../../../omlish/lite/runtime.py
-
-
-@cached_nullary
-def is_debugger_attached() -> bool:
-    return any(frame[1].endswith('pydevd.py') for frame in inspect.stack())
-
-
-REQUIRED_PYTHON_VERSION = (3, 8)
-
-
-def check_runtime_version() -> None:
-    if sys.version_info < REQUIRED_PYTHON_VERSION:
-        raise OSError(f'Requires python {REQUIRED_PYTHON_VERSION}, got {sys.version_info} from {sys.executable}')  # noqa
-
-
-########################################
-# ../deploy.py
-
-
-class DeployImpl(Deploy):
-    concern_cls = DeployConcern
-
-    def __init__(
-            self,
-            config: DeployConfig,
-            site: Site,
-    ) -> None:
-        super().__init__(config)
-        self._site = site
-
-    @property
-    def site(self) -> Site:
-        return self._site
-
-    def run(self, runtime: Runtime) -> None:
-        for c in self._concerns:
-            c.run(runtime)
-
-
-########################################
-# ../nginx.py
-
-
-class NginxSiteConcern(SiteConcern['NginxSiteConcern.Config']):
-    @dc.dataclass(frozen=True)
-    class Config(SiteConcernConfig):
-        global_conf_file: str = '/etc/nginx/sites-enabled/omlish.conf'
-
-    @cached_nullary
-    def confs_dir(self) -> str:
-        return os.path.join(self._site.config.root_dir, 'conf', 'nginx')
-
-    def run(self, runtime: Runtime) -> None:
-        if runtime.stat(self._config.global_conf_file) is None:
-            runtime.write_file(
-                self._config.global_conf_file,
-                f'include {self.confs_dir()}/*.conf;\n',
-            )
-
-
-class NginxDeployConcern(DeployConcern['NginxDeployConcern.Config']):
-    @dc.dataclass(frozen=True)
-    class Config(DeployConcernConfig):
-        listen_port: int = 80
-        proxy_port: int = 8000
-
-    @cached_nullary
-    def conf_file(self) -> str:
-        return os.path.join(self._deploy.site.concern(NginxSiteConcern).confs_dir(), self._deploy.config.name + '.conf')
-
-    @cached_nullary
-    def fs_items(self) -> ta.Sequence[FsItem]:
-        return [FsFile(self.conf_file())]
-
-    def run(self, runtime: Runtime) -> None:
-        runtime.make_dirs(os.path.dirname(self.conf_file()))
-
-        conf = textwrap.dedent(f"""
-            server {{
-                listen {self._config.listen_port};
-                location / {{
-                    proxy_pass http://127.0.0.1:{self._config.proxy_port}/;
-                }}
-            }}
-        """)
-
-        runtime.write_file(self.conf_file(), conf)
-
-
-########################################
-# ../repo.py
-
-
-class RepoDeployConcern(DeployConcern['RepoDeployConcern.Config']):
-    @dc.dataclass(frozen=True)
-    class Config(DeployConcernConfig):
-        url: str
-        revision: str = 'master'
-        init_submodules: bool = False
-
-    @cached_nullary
-    def repo_dir(self) -> str:
-        return os.path.join(self._deploy.site.config.root_dir, 'repos', self._deploy.config.name)
-
-    @cached_nullary
-    def fs_items(self) -> ta.Sequence[FsItem]:
-        return [FsDir(self.repo_dir())]
-
-    def run(self, runtime: Runtime) -> None:
-        runtime.make_dirs(self.repo_dir())
-
-        runtime.sh(
-            f'cd {self.repo_dir()}',
-            'git init',
-            f'git remote add origin {self._config.url}',
-            f'git fetch --depth 1 origin {self._config.revision}',
-            'git checkout FETCH_HEAD',
-            *([
-                'git submodule update --init',
-            ] if self._config.init_submodules else []),
-        )
-
-
-########################################
-# ../site.py
-
-
-class SiteImpl(Site):
-    concern_cls = SiteConcern
-
-    def run(self, runtime: Runtime) -> None:
-        for c in self._concerns:
-            c.run(runtime)
-
-
-########################################
-# ../../../../../omlish/lite/subprocesses.py
-
-
-##
-
-
-_SUBPROCESS_SHELL_WRAP_EXECS = False
-
-
-def subprocess_shell_wrap_exec(*args: str) -> ta.Tuple[str, ...]:
-    return ('sh', '-c', ' '.join(map(shlex.quote, args)))
-
-
-def subprocess_maybe_shell_wrap_exec(*args: str) -> ta.Tuple[str, ...]:
-    if _SUBPROCESS_SHELL_WRAP_EXECS or is_debugger_attached():
-        return subprocess_shell_wrap_exec(*args)
-    else:
-        return args
-
-
-def _prepare_subprocess_invocation(
-        *args: str,
-        env: ta.Optional[ta.Mapping[str, ta.Any]] = None,
-        extra_env: ta.Optional[ta.Mapping[str, ta.Any]] = None,
-        quiet: bool = False,
-        shell: bool = False,
-        **kwargs: ta.Any,
-) -> ta.Tuple[ta.Tuple[ta.Any, ...], ta.Dict[str, ta.Any]]:
-    log.debug(args)
-    if extra_env:
-        log.debug(extra_env)
-
-    if extra_env:
-        env = {**(env if env is not None else os.environ), **extra_env}
-
-    if quiet and 'stderr' not in kwargs:
-        if not log.isEnabledFor(logging.DEBUG):
-            kwargs['stderr'] = subprocess.DEVNULL
-
-    if not shell:
-        args = subprocess_maybe_shell_wrap_exec(*args)
-
-    return args, dict(
-        env=env,
-        shell=shell,
-        **kwargs,
-    )
-
-
-def subprocess_check_call(*args: str, stdout=sys.stderr, **kwargs: ta.Any) -> None:
-    args, kwargs = _prepare_subprocess_invocation(*args, stdout=stdout, **kwargs)
-    return subprocess.check_call(args, **kwargs)  # type: ignore
-
-
-def subprocess_check_output(*args: str, **kwargs: ta.Any) -> bytes:
-    args, kwargs = _prepare_subprocess_invocation(*args, **kwargs)
-    return subprocess.check_output(args, **kwargs)
-
-
-def subprocess_check_output_str(*args: str, **kwargs: ta.Any) -> str:
-    return subprocess_check_output(*args, **kwargs).decode().strip()
-
-
-##
-
-
-DEFAULT_SUBPROCESS_TRY_EXCEPTIONS: ta.Tuple[ta.Type[Exception], ...] = (
-    FileNotFoundError,
-    subprocess.CalledProcessError,
-)
-
-
-def subprocess_try_call(
-        *args: str,
-        try_exceptions: ta.Tuple[ta.Type[Exception], ...] = DEFAULT_SUBPROCESS_TRY_EXCEPTIONS,
-        **kwargs: ta.Any,
-) -> bool:
-    try:
-        subprocess_check_call(*args, **kwargs)
-    except try_exceptions as e:  # noqa
-        if log.isEnabledFor(logging.DEBUG):
-            log.exception('command failed')
-        return False
-    else:
-        return True
-
-
-def subprocess_try_output(
-        *args: str,
-        try_exceptions: ta.Tuple[ta.Type[Exception], ...] = DEFAULT_SUBPROCESS_TRY_EXCEPTIONS,
-        **kwargs: ta.Any,
-) -> ta.Optional[bytes]:
-    try:
-        return subprocess_check_output(*args, **kwargs)
-    except try_exceptions as e:  # noqa
-        if log.isEnabledFor(logging.DEBUG):
-            log.exception('command failed')
-        return None
-
-
-def subprocess_try_output_str(*args: str, **kwargs: ta.Any) -> ta.Optional[str]:
-    out = subprocess_try_output(*args, **kwargs)
-    return out.decode().strip() if out is not None else None
-
-
-##
-
-
-def subprocess_close(
-        proc: subprocess.Popen,
-        timeout: ta.Optional[float] = None,
-) -> None:
-    # TODO: terminate, sleep, kill
-    if proc.stdout:
-        proc.stdout.close()
-    if proc.stderr:
-        proc.stderr.close()
-    if proc.stdin:
-        proc.stdin.close()
-
-    proc.wait(timeout)
-
-
-########################################
-# ../runtime.py
-
-
-class RuntimeImpl(Runtime):
-    def __init__(self) -> None:
-        super().__init__()
-
-    def stat(self, p: str) -> ta.Optional[Runtime.Stat]:
-        try:
-            st = os.stat(p)
-        except FileNotFoundError:
-            return None
-        else:
-            return Runtime.Stat(
-                path=p,
-                is_dir=bool(st.st_mode & stat.S_IFDIR),
-            )
-
-    def make_dirs(self, p: str, exist_ok: bool = False) -> None:
-        os.makedirs(p, exist_ok=exist_ok)
-
-    def write_file(self, p: str, c: ta.Union[str, bytes]) -> None:
-        if os.path.exists(p):
-            raise RuntimeError(f'Path exists: {p}')
-        with open(p, 'w' if isinstance(c, str) else 'wb') as f:
-            f.write(c)
-
-    def sh(self, *ss: str) -> None:
-        s = ' && '.join(ss)
-        log.info('Executing: %s', s)
-        subprocess_check_call(s, shell=True)
-
-
-########################################
-# ../venv.py
-
-
-class VenvDeployConcern(DeployConcern['VenvDeployConcern.Config']):
-    @dc.dataclass(frozen=True)
-    class Config(DeployConcernConfig):
-        interp_version: str
-        requirements_txt: str = 'requirements.txt'
-
-    @cached_nullary
-    def venv_dir(self) -> str:
-        return os.path.join(self._deploy.site.config.root_dir, 'venvs', self._deploy.config.name)
-
-    @cached_nullary
-    def fs_items(self) -> ta.Sequence[FsItem]:
-        return [FsDir(self.venv_dir())]
-
-    @cached_nullary
-    def exe(self) -> str:
-        return os.path.join(self.venv_dir(), 'bin', 'python')
-
-    def run(self, runtime: Runtime) -> None:
-        runtime.make_dirs(self.venv_dir())
-
-        rd = self._deploy.concern(RepoDeployConcern).repo_dir()
-
-        l, r = os.path.split(self.venv_dir())
-
-        # FIXME: lol
-        py_exe = 'python3'
-
-        runtime.sh(
-            f'cd {l}',
-            f'{py_exe} -mvenv {r}',
-
-            # https://stackoverflow.com/questions/77364550/attributeerror-module-pkgutil-has-no-attribute-impimporter-did-you-mean
-            f'{self.exe()} -m ensurepip',
-            f'{self.exe()} -mpip install --upgrade setuptools pip',
-
-            f'{self.exe()} -mpip install -r {rd}/{self._config.requirements_txt}',  # noqa
-        )
-
-
-########################################
-# ../supervisor.py
-
-
-# class SupervisorSiteConcern(SiteConcern['SupervisorSiteConcern.Config']):
-# @dc.dataclass(frozen=True)
-# class Config(DeployConcern.Config):
-# global_conf_file: str = '/etc/supervisor/conf.d/supervisord.conf'
-#
-# def run(self) -> None:
-# sup_conf_dir = os.path.join(self._d.home_dir(), 'conf/supervisor')
-# with open(self._d.host_cfg.global_supervisor_conf_file_path) as f:
-# glo_sup_conf = f.read()
-# if sup_conf_dir not in glo_sup_conf:
-# log.info('Updating global supervisor conf at %s', self._d.host_cfg.global_supervisor_conf_file_path)  # noqa
-# glo_sup_conf += textwrap.dedent(f"""
-# [include]
-# files = {self._d.home_dir()}/conf/supervisor/*.conf
-# """)
-# with open(self._d.host_cfg.global_supervisor_conf_file_path, 'w') as f:
-# f.write(glo_sup_conf)
-
-
-class SupervisorDeployConcern(DeployConcern['SupervisorDeployConcern.Config']):
-    @dc.dataclass(frozen=True)
-    class Config(DeployConcernConfig):
-        entrypoint: str
-
-    @cached_nullary
-    def conf_file(self) -> str:
-        return os.path.join(self._deploy.site.config.root_dir, 'conf', 'supervisor', self._deploy.config.name + '.conf')
-
-    @cached_nullary
-    def fs_items(self) -> ta.Sequence[FsItem]:
-        return [FsFile(self.conf_file())]
-
-    def run(self, runtime: Runtime) -> None:
-        runtime.make_dirs(os.path.dirname(self.conf_file()))
-
-        rd = self._deploy.concern(RepoDeployConcern).repo_dir()
-        vx = self._deploy.concern(VenvDeployConcern).exe()
-
-        conf = textwrap.dedent(f"""
-            [program:{self._deploy.config.name}]
-            command={vx} -m {self._config.entrypoint}
-            directory={rd}
-            user={self._deploy.site.config.user}
-            autostart=true
-            autorestart=true
-        """)
-
-        runtime.write_file(self.conf_file(), conf)
-
-
-########################################
-# main.py
-
-
-def _main() -> None:
-    pass
-
-
-if __name__ == '__main__':
-    _main()