ominfra 0.0.0.dev90__py3-none-any.whl → 0.0.0.dev92__py3-none-any.whl
- ominfra/clouds/aws/journald2aws/main.py +37 -20
- ominfra/clouds/aws/logs.py +2 -2
- ominfra/journald/__init__.py +0 -0
- ominfra/{clouds/aws/journald2aws/journald → journald}/messages.py +27 -15
- ominfra/journald/tailer.py +453 -0
- ominfra/scripts/journald2aws.py +549 -148
- ominfra/scripts/supervisor.py +484 -112
- ominfra/supervisor/compat.py +6 -2
- ominfra/supervisor/configs.py +54 -54
- ominfra/supervisor/context.py +2 -0
- ominfra/supervisor/datatypes.py +4 -0
- ominfra/supervisor/dispatchers.py +28 -16
- ominfra/supervisor/events.py +6 -7
- ominfra/supervisor/exceptions.py +7 -5
- ominfra/supervisor/process.py +14 -6
- ominfra/supervisor/supervisor.py +15 -29
- ominfra/{clouds/aws/journald2aws/threadworker.py → threadworker.py} +6 -3
- {ominfra-0.0.0.dev90.dist-info → ominfra-0.0.0.dev92.dist-info}/METADATA +4 -4
- {ominfra-0.0.0.dev90.dist-info → ominfra-0.0.0.dev92.dist-info}/RECORD +24 -24
- ominfra/clouds/aws/journald2aws/journald/__init__.py +0 -1
- ominfra/clouds/aws/journald2aws/journald/tailer.py +0 -108
- /ominfra/{clouds/aws/journald2aws/journald → journald}/genmessages.py +0 -0
- {ominfra-0.0.0.dev90.dist-info → ominfra-0.0.0.dev92.dist-info}/LICENSE +0 -0
- {ominfra-0.0.0.dev90.dist-info → ominfra-0.0.0.dev92.dist-info}/WHEEL +0 -0
- {ominfra-0.0.0.dev90.dist-info → ominfra-0.0.0.dev92.dist-info}/entry_points.txt +0 -0
- {ominfra-0.0.0.dev90.dist-info → ominfra-0.0.0.dev92.dist-info}/top_level.txt +0 -0
ominfra/clouds/aws/journald2aws/main.py
CHANGED
@@ -4,7 +4,7 @@
 """
 TODO:
  - create log group
- - log stats - chunk sizes etc
+ - log stats - chunk sizes, byte count, num calls, etc

 ==

@@ -53,11 +53,11 @@ from omlish.lite.marshal import unmarshal_obj
 from omlish.lite.pidfile import Pidfile
 from omlish.lite.runtime import is_debugger_attached

+from ....journald.messages import JournalctlMessage  # noqa
+from ....journald.tailer import JournalctlTailerWorker
 from ..auth import AwsSigner
 from ..logs import AwsLogMessagePoster
 from ..logs import AwsPutLogEventsResponse
-from .journald.messages import JournalctlMessage  # noqa
-from .journald.tailer import JournalctlTailerWorker


 @dc.dataclass(frozen=True)
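
The journald message and tailer helpers now live in the top-level ominfra/journald package (see the file moves in the listing above), so main.py reaches them with a four-dot relative import instead of the old package-local one. A rough sketch of what the two forms resolve to, assuming the package layout shown in the file list:

# Illustration only, assuming main.py sits at ominfra/clouds/aws/journald2aws/main.py
# and messages.py at ominfra/journald/messages.py.
#
# Old (package-local):
#   from .journald.messages import JournalctlMessage
#   -> ominfra.clouds.aws.journald2aws.journald.messages
#
# New (four dots climb journald2aws -> aws -> clouds -> ominfra):
#   from ....journald.messages import JournalctlMessage
#   -> ominfra.journald.messages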
@@ -184,15 +184,18 @@ class JournalctlToAws:

     @cached_nullary
     def _journalctl_tailer_worker(self) -> JournalctlTailerWorker:
-        ac: ta.Optional[str] = self._config.journalctl_after_cursor
-        if ac is None:
-            ac = self._read_cursor_file()
-        if ac is not None:
-            log.info('Starting from cursor %s', ac)
+        ac: ta.Optional[str] = None

         if (since := self._config.journalctl_since):
             log.info('Starting since %s', since)

+        else:
+            ac = self._config.journalctl_after_cursor
+            if ac is None:
+                ac = self._read_cursor_file()
+            if ac is not None:
+                log.info('Starting from cursor %s', ac)
+
         return JournalctlTailerWorker(
             self._journalctl_message_queue(),

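
After this change `journalctl_since` takes precedence, and the saved cursor is only consulted when no since value is configured. How those options reach journalctl itself is not shown in this hunk; the helper below is a hypothetical sketch of the resulting invocation, using standard journalctl flags:

import typing as ta


def build_journalctl_cmd(
        since: ta.Optional[str],
        after_cursor: ta.Optional[str],
) -> ta.List[str]:
    # Hypothetical helper mirroring the precedence above: --since wins,
    # otherwise fall back to the saved cursor, otherwise no positioning flag.
    cmd = ['journalctl', '-o', 'json', '--show-cursor', '-f']
    if since is not None:
        cmd.extend(['--since', since])
    elif after_cursor is not None:
        cmd.extend(['--after-cursor', after_cursor])
    return cmd


print(build_journalctl_cmd('today', None))
# ['journalctl', '-o', 'json', '--show-cursor', '-f', '--since', 'today']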
@@ -208,9 +211,9 @@
     def run(self) -> None:
         self._ensure_locked()

-        q = self._journalctl_message_queue()
-        jtw = self._journalctl_tailer_worker()
-        mp = self._aws_log_message_poster()
+        q = self._journalctl_message_queue()  # type: queue.Queue[ta.Sequence[JournalctlMessage]]
+        jtw = self._journalctl_tailer_worker()  # type: JournalctlTailerWorker
+        mp = self._aws_log_message_poster()  # type: AwsLogMessagePoster

         jtw.start()

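
The accessors are wrapped in @cached_nullary, and the `# type:` comments make the concrete types explicit at the call site without evaluating subscripted generics like `queue.Queue[...]` at runtime, which matters for older interpreters. A minimal, self-contained illustration of comment-style annotations; `make_queue` below is hypothetical, not the ominfra API:

import queue


def make_queue():  # hypothetical helper with no annotated return type
    return queue.Queue()


# A comment-style annotation gives the type checker the concrete type without
# evaluating queue.Queue[str] at runtime.
q = make_queue()  # type: queue.Queue[str]
q.put('hello')
print(q.get())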
@@ -220,7 +223,13 @@
                 log.critical('Journalctl tailer worker died')
                 break

-
+            try:
+                msgs: ta.Sequence[JournalctlMessage] = q.get(timeout=1.)
+            except queue.Empty:
+                msgs = []
+            if not msgs:
+                continue
+
             log.debug('%r', msgs)

             cur_cursor: ta.Optional[str] = None
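
The loop now polls the queue with a one-second timeout instead of blocking indefinitely, so the liveness check on the tailer worker still runs while the journal is idle. A minimal sketch of the same pattern in isolation:

import queue
import threading


def poll_loop(q: 'queue.Queue[str]', worker: threading.Thread) -> None:
    # Poll-with-timeout: wake up at least once a second so the worker
    # liveness check fires even when no messages arrive; a plain blocking
    # q.get() would hang here forever on an idle journal.
    while True:
        if not worker.is_alive():
            print('tailer worker died')
            break
        try:
            items = [q.get(timeout=1.)]
        except queue.Empty:
            items = []
        if not items:
            continue
        print('got:', items)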
@@ -233,10 +242,14 @@
                 log.warning('Empty queue chunk')
                 continue

-
-
-
-
+            feed_msgs = []
+            for m in msgs:
+                feed_msgs.append(mp.Message(
+                    message=json.dumps(m.dct, sort_keys=True),
+                    ts_ms=int((m.ts_us / 1000.) if m.ts_us is not None else (time.time() * 1000.)),
+                ))
+
+            [post] = mp.feed(feed_msgs)
             log.debug('%r', post)

             if not self._config.dry_run:
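
Each queued message is serialized to JSON, and its journald realtime timestamp is converted from microseconds to the milliseconds that CloudWatch Logs expects, falling back to the current wall clock when the message carried no timestamp. The arithmetic, with an example value:

import time

ts_us = 1_700_000_000_000_000      # example journald realtime timestamp (microseconds since epoch)
ts_ms = int(ts_us / 1000.)         # 1_700_000_000_000 -> milliseconds, as PutLogEvents expects

fallback_ms = int(time.time() * 1000.)  # used when a message has no parseable timestamp
print(ts_ms, fallback_ms)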
@@ -294,7 +307,7 @@ def _main() -> None:
     if not args.real:
         config = dc.replace(config, journalctl_cmd=[
             sys.executable,
-            os.path.join(os.path.dirname(__file__), 'journald', 'genmessages.py'),
+            os.path.join(os.path.dirname(__file__), '..', '..', '..', 'journald', 'genmessages.py'),
             '--sleep-n', '2',
             '--sleep-s', '.5',
             *(['--message', args.message] if args.message else []),
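
Because genmessages.py moved to the top-level journald package, the fake-journalctl path used when `--real` is not passed now climbs three directories out of journald2aws. What the join resolves to, assuming an installed layout like the illustrative path below:

import os.path

# Assuming main.py is installed at .../ominfra/clouds/aws/journald2aws/main.py
# (the path below is illustrative):
main_py = '/site-packages/ominfra/clouds/aws/journald2aws/main.py'
p = os.path.join(os.path.dirname(main_py), '..', '..', '..', 'journald', 'genmessages.py')
print(os.path.normpath(p))
# /site-packages/ominfra/journald/genmessages.py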
@@ -303,9 +316,13 @@ def _main() -> None:

     #

-    for
-
-
+    for ca, pa in [
+        ('journalctl_after_cursor', 'after_cursor'),
+        ('journalctl_since', 'since'),
+        ('dry_run', 'dry_run'),
+    ]:
+        if (av := getattr(args, pa)):
+            config = dc.replace(config, **{ca: av})

     #

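
The rewritten block maps each CLI argument name onto its config field and only overrides fields whose argument value is truthy, producing a new frozen config via dc.replace. A self-contained sketch of the same pattern; the Config class and argument values here are stand-ins, not the real journald2aws config:

import argparse
import dataclasses as dc
import typing as ta


@dc.dataclass(frozen=True)
class Config:
    # Hypothetical stand-in for the journald2aws config dataclass.
    journalctl_after_cursor: ta.Optional[str] = None
    journalctl_since: ta.Optional[str] = None
    dry_run: bool = False


args = argparse.Namespace(after_cursor='s=abc123', since=None, dry_run=True)
config = Config()

for ca, pa in [
    ('journalctl_after_cursor', 'after_cursor'),
    ('journalctl_since', 'since'),
    ('dry_run', 'dry_run'),
]:
    if (av := getattr(args, pa)):
        config = dc.replace(config, **{ca: av})

print(config)
# Config(journalctl_after_cursor='s=abc123', journalctl_since=None, dry_run=True)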
ominfra/clouds/aws/logs.py
CHANGED
@@ -68,7 +68,7 @@ class AwsLogMessagePoster:
      - max_items
      - max_bytes - manually build body
      - flush_interval
-     -
+     - split sorted chunks if span over 24h
     """

     DEFAULT_URL = 'https://logs.{region_name}.amazonaws.com/'  # noqa
@@ -141,7 +141,7 @@ class AwsLogMessagePoster:
                     message=m.message,
                     timestamp=m.ts_ms,
                 )
-                for m in messages
+                for m in sorted(messages, key=lambda m: m.ts_ms)
             ],
         )

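
Events are now sorted by timestamp before the request body is built; PutLogEvents has historically required the events in a batch to be in chronological order, and journal messages can arrive slightly out of order. A small illustration using a stand-in for the poster's Message type (field names taken from the main.py hunk above):

import dataclasses as dc
import typing as ta


@dc.dataclass(frozen=True)
class Message:
    # Stand-in for AwsLogMessagePoster.Message.
    message: str
    ts_ms: int


messages: ta.List[Message] = [
    Message('b', ts_ms=1_700_000_001_000),
    Message('a', ts_ms=1_700_000_000_000),
]

# Same ordering the hunk above applies before building the PutLogEvents body.
ordered = sorted(messages, key=lambda m: m.ts_ms)
assert [m.message for m in ordered] == ['a', 'b']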
File without changes
ominfra/{clouds/aws/journald2aws/journald → journald}/messages.py
CHANGED
@@ -1,4 +1,5 @@
 # ruff: noqa: UP006 UP007
+# @omlish-lite
 import dataclasses as dc
 import json
 import typing as ta
@@ -23,7 +24,31 @@ class JournalctlMessageBuilder:
         self._buf = DelimitingBuffer(b'\n')

     _cursor_field = '__CURSOR'
-
+
+    _timestamp_fields: ta.Sequence[str] = [
+        '_SOURCE_REALTIME_TIMESTAMP',
+        '__REALTIME_TIMESTAMP',
+    ]
+
+    def _get_message_timestamp(self, dct: ta.Mapping[str, ta.Any]) -> ta.Optional[int]:
+        for fld in self._timestamp_fields:
+            if (tsv := dct.get(fld)) is None:
+                continue
+
+            if isinstance(tsv, str):
+                try:
+                    return int(tsv)
+                except ValueError:
+                    try:
+                        return int(float(tsv))
+                    except ValueError:
+                        log.exception('Failed to parse timestamp: %r', tsv)
+
+            elif isinstance(tsv, (int, float)):
+                return int(tsv)
+
+        log.error('Invalid timestamp: %r', dct)
+        return None

     def _make_message(self, raw: bytes) -> JournalctlMessage:
         dct = None
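
The extracted helper checks _SOURCE_REALTIME_TIMESTAMP before __REALTIME_TIMESTAMP and tolerates string, float-string, and numeric values. A standalone rendition of the same parsing logic with example inputs (logging is wired to the stdlib here, since the module's log object is not shown in this hunk):

import logging
import typing as ta

log = logging.getLogger(__name__)

_TIMESTAMP_FIELDS: ta.Sequence[str] = [
    '_SOURCE_REALTIME_TIMESTAMP',
    '__REALTIME_TIMESTAMP',
]


def get_message_timestamp(dct: ta.Mapping[str, ta.Any]) -> ta.Optional[int]:
    # Standalone rendition of JournalctlMessageBuilder._get_message_timestamp.
    for fld in _TIMESTAMP_FIELDS:
        if (tsv := dct.get(fld)) is None:
            continue
        if isinstance(tsv, str):
            try:
                return int(tsv)
            except ValueError:
                try:
                    return int(float(tsv))
                except ValueError:
                    log.exception('Failed to parse timestamp: %r', tsv)
        elif isinstance(tsv, (int, float)):
            return int(tsv)
    log.error('Invalid timestamp: %r', dct)
    return None


assert get_message_timestamp({'__REALTIME_TIMESTAMP': '1700000000000000'}) == 1700000000000000
assert get_message_timestamp({'_SOURCE_REALTIME_TIMESTAMP': 1.7e15}) == 1700000000000000
assert get_message_timestamp({}) is None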
@@ -37,20 +62,7 @@ class JournalctlMessageBuilder:

         else:
             cursor = dct.get(self._cursor_field)
-
-            if tsv := dct.get(self._timestamp_field):
-                if isinstance(tsv, str):
-                    try:
-                        ts = int(tsv)
-                    except ValueError:
-                        try:
-                            ts = int(float(tsv))
-                        except ValueError:
-                            log.exception('Failed to parse timestamp: %r', tsv)
-                elif isinstance(tsv, (int, float)):
-                    ts = int(tsv)
-                else:
-                    log.exception('Invalid timestamp: %r', tsv)
+            ts = self._get_message_timestamp(dct)

         return JournalctlMessage(
             raw=raw,