ominfra 0.0.0.dev88__py3-none-any.whl → 0.0.0.dev90__py3-none-any.whl

Sign up to get free protection for your applications and to get access to all the features.
@@ -0,0 +1,2172 @@
1
+ #!/usr/bin/env python3
2
+ # noinspection DuplicatedCode
3
+ # @omlish-lite
4
+ # @omlish-script
5
+ # @omlish-amalg-output ../clouds/aws/journald2aws/main.py
6
+ # ruff: noqa: N802 UP006 UP007 UP036
7
+ """
8
+ TODO:
9
+ - create log group
10
+ - log stats - chunk sizes etc
11
+
12
+ ==
13
+
14
+ https://www.freedesktop.org/software/systemd/man/latest/journalctl.html
15
+
16
+ journalctl:
17
+ -o json
18
+ --show-cursor
19
+
20
+ --since "2012-10-30 18:17:16"
21
+ --until "2012-10-30 18:17:16"
22
+
23
+ --after-cursor <cursor>
24
+
25
+ ==
26
+
27
+ https://www.freedesktop.org/software/systemd/man/latest/systemd.journal-fields.html
28
+
29
+ ==
30
+
31
+ @dc.dataclass(frozen=True)
32
+ class Journald2AwsConfig:
33
+ log_group_name: str
34
+ log_stream_name: str
35
+
36
+ aws_batch_size: int = 1_000
37
+ aws_flush_interval_s: float = 1.
38
+ """
39
+ import abc
40
+ import argparse
41
+ import base64
42
+ import collections.abc
43
+ import contextlib
44
+ import dataclasses as dc
45
+ import datetime
46
+ import decimal
47
+ import enum
48
+ import fcntl
49
+ import fractions
50
+ import functools
51
+ import hashlib
52
+ import hmac
53
+ import inspect
54
+ import io
55
+ import json
56
+ import logging
57
+ import os
58
+ import os.path
59
+ import queue
60
+ import shlex
61
+ import signal
62
+ import subprocess
63
+ import sys
64
+ import threading
65
+ import time
66
+ import typing as ta
67
+ import urllib.parse
68
+ import urllib.request
69
+ import uuid
70
+ import weakref # noqa
71
+
72
+
73
+ ########################################
74
+
75
+
76
# Hard runtime gate: the amalgamated script relies on 3.8+ features
# (walrus operators, threading.get_native_id, f-strings with =, etc.).
if sys.version_info < (3, 8):
    raise OSError(
        f'Requires python (3, 8), got {sys.version_info} from {sys.executable}')  # noqa
79
+
80
+
81
+ ########################################
82
+
83
+
84
+ # ../../../../../omlish/lite/check.py
85
# Generic type variable shared by the check_* helpers and reflect utilities below.
T = ta.TypeVar('T')
86
+
87
+
88
+ ########################################
89
+ # ../../../../../omlish/lite/cached.py
90
+
91
+
92
class cached_nullary:  # noqa
    """Memoize a zero-argument callable: the wrapped function runs at most once.

    Also works as a method decorator: ``__get__`` creates a per-instance
    bound copy and stores it in the instance ``__dict__`` so that subsequent
    attribute lookups bypass the descriptor entirely.
    """

    def __init__(self, fn):
        super().__init__()
        self._fn = fn
        # Unique sentinel distinguishes "not computed yet" from a None result.
        self._value = self._missing = object()
        functools.update_wrapper(self, fn)

    def __call__(self, *args, **kwargs):  # noqa
        # NOTE: any positional/keyword args passed here are ignored by design —
        # the wrapped function is nullary.
        if self._value is self._missing:
            self._value = self._fn()
        return self._value

    def __get__(self, instance, owner):  # noqa
        # Cache the bound wrapper on the instance so this descriptor fires only once.
        bound = instance.__dict__[self._fn.__name__] = self.__class__(self._fn.__get__(instance, owner))
        return bound
107
+
108
+
109
+ ########################################
110
+ # ../../../../../omlish/lite/check.py
111
+
112
+
113
def check_isinstance(v: T, spec: ta.Union[ta.Type[T], tuple]) -> T:
    """Return *v* unchanged; raise TypeError unless it is an instance of *spec*."""
    if isinstance(v, spec):
        return v
    raise TypeError(v)
117
+
118
+
119
def check_not_isinstance(v: T, spec: ta.Union[type, tuple]) -> T:
    """Return *v* unchanged; raise TypeError if it IS an instance of *spec*."""
    if not isinstance(v, spec):
        return v
    raise TypeError(v)
123
+
124
+
125
def check_not_none(v: ta.Optional[T]) -> T:
    """Return *v*; raise ValueError when it is None."""
    if v is not None:
        return v
    raise ValueError
129
+
130
+
131
def check_not(v: ta.Any) -> None:
    """Return *v* (expected falsy); raise ValueError when it is truthy."""
    if not v:
        return v
    raise ValueError(v)
135
+
136
+
137
def check_non_empty_str(v: ta.Optional[str]) -> str:
    """Return *v*; raise ValueError when it is None or empty."""
    if v:
        return v
    raise ValueError
141
+
142
+
143
def check_state(v: bool, msg: str = 'Illegal state') -> None:
    """Raise ValueError(*msg*) when *v* is falsy; no-op otherwise."""
    if v:
        return
    raise ValueError(msg)
146
+
147
+
148
def check_equal(l: T, r: T) -> T:
    """Return *l*; raise ValueError(l, r) unless l == r."""
    if l == r:
        return l
    raise ValueError(l, r)
152
+
153
+
154
def check_not_equal(l: T, r: T) -> T:
    """Return *l*; raise ValueError(l, r) when l == r."""
    if l != r:
        return l
    raise ValueError(l, r)
158
+
159
+
160
def check_single(vs: ta.Iterable[T]) -> T:
    """Return the sole element of *vs*; unpacking raises ValueError unless exactly one is present."""
    (v,) = vs
    return v
163
+
164
+
165
+ ########################################
166
+ # ../../../../../omlish/lite/json.py
167
+
168
+
169
##


# Indent width used by the *_pretty helpers.
JSON_PRETTY_INDENT = 2

JSON_PRETTY_KWARGS: ta.Mapping[str, ta.Any] = dict(
    indent=JSON_PRETTY_INDENT,
)

# NOTE(review): json.dump writes to a file object and returns None, so the
# ``bytes`` return annotation is inaccurate (hence the type-ignore).
json_dump_pretty: ta.Callable[..., bytes] = functools.partial(json.dump, **JSON_PRETTY_KWARGS)  # type: ignore
json_dumps_pretty: ta.Callable[..., str] = functools.partial(json.dumps, **JSON_PRETTY_KWARGS)


##


# Most-compact form: no whitespace after ',' or ':'.
JSON_COMPACT_SEPARATORS = (',', ':')

JSON_COMPACT_KWARGS: ta.Mapping[str, ta.Any] = dict(
    indent=None,
    separators=JSON_COMPACT_SEPARATORS,
)

json_dump_compact: ta.Callable[..., bytes] = functools.partial(json.dump, **JSON_COMPACT_KWARGS)  # type: ignore
json_dumps_compact: ta.Callable[..., str] = functools.partial(json.dumps, **JSON_COMPACT_KWARGS)
194
+
195
+
196
+ ########################################
197
+ # ../../../../../omlish/lite/pidfile.py
198
+
199
+
200
class Pidfile:
    """Context-managed pidfile guarded by a non-blocking exclusive ``flock``.

    The file is opened read-write and kept open for the lifetime of the
    context; holding the open file descriptor is what holds the lock.
    """

    def __init__(self, path: str) -> None:
        super().__init__()
        self._path = path

    # Open handle; only set between __enter__ and __exit__.
    _f: ta.TextIO

    def __repr__(self) -> str:
        return f'{self.__class__.__name__}({self._path!r})'

    def __enter__(self) -> 'Pidfile':
        fd = os.open(self._path, os.O_RDWR | os.O_CREAT, 0o600)
        try:
            # Inheritable so forked children keep the lock-holding fd alive.
            os.set_inheritable(fd, True)
            f = os.fdopen(fd, 'r+')
        except Exception:
            # Don't leak the raw fd if wrapping it failed.
            try:
                os.close(fd)
            except Exception:  # noqa
                pass
            raise
        self._f = f
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if self._f is not None:
            self._f.close()
            del self._f

    def try_lock(self) -> bool:
        """Attempt to take the exclusive lock without blocking; True on success."""
        try:
            fcntl.flock(self._f, fcntl.LOCK_EX | fcntl.LOCK_NB)
            return True
        except OSError:
            return False

    def ensure_locked(self) -> None:
        """Raise RuntimeError unless this process holds (or can take) the lock."""
        if not self.try_lock():
            raise RuntimeError('Could not get lock')

    def write(self, pid: ta.Optional[int] = None) -> None:
        """Write *pid* (default: the current process id) into the locked pidfile."""
        self.ensure_locked()
        if pid is None:
            pid = os.getpid()
        self._f.write(f'{pid}\n')
        self._f.flush()

    def clear(self) -> None:
        """Truncate the pidfile contents (requires holding the lock)."""
        self.ensure_locked()
        self._f.seek(0)
        self._f.truncate()

    def read(self) -> int:
        """Read the owner's pid; only meaningful while another process holds the lock.

        Raises RuntimeError if the lock is free (i.e. nobody owns the file);
        note an empty file will make int() raise ValueError.
        """
        if self.try_lock():
            raise RuntimeError('Got lock')
        self._f.seek(0)
        return int(self._f.read())

    def kill(self, sig: int = signal.SIGTERM) -> None:
        """Send *sig* to the pid recorded in the file."""
        pid = self.read()
        os.kill(pid, sig)  # FIXME: Still racy
261
+
262
+
263
+ ########################################
264
+ # ../../../../../omlish/lite/reflect.py
265
+
266
+
267
# Private typing classes that represent parameterized generics; newer Pythons
# split some aliases into _SpecialGenericAlias, so include it when present.
_GENERIC_ALIAS_TYPES = (
    ta._GenericAlias,  # type: ignore  # noqa
    *([ta._SpecialGenericAlias] if hasattr(ta, '_SpecialGenericAlias') else []),  # noqa
)
271
+
272
+
273
def is_generic_alias(obj, *, origin: ta.Any = None) -> bool:
    """True when *obj* is a typing generic alias, optionally restricted to *origin*."""
    if not isinstance(obj, _GENERIC_ALIAS_TYPES):
        return False
    return origin is None or ta.get_origin(obj) is origin
278
+
279
+
280
# Convenience predicates for the two most commonly queried origins.
is_union_alias = functools.partial(is_generic_alias, origin=ta.Union)
is_callable_alias = functools.partial(is_generic_alias, origin=ta.Callable)
282
+
283
+
284
def is_optional_alias(spec: ta.Any) -> bool:
    """True for ``Optional[X]``: a two-argument Union containing NoneType."""
    if not isinstance(spec, _GENERIC_ALIAS_TYPES):  # noqa
        return False
    if ta.get_origin(spec) is not ta.Union:
        return False
    args = ta.get_args(spec)
    return len(args) == 2 and any(a in (None, type(None)) for a in args)
291
+
292
+
293
def get_optional_alias_arg(spec: ta.Any) -> ta.Any:
    """Return the single non-None type argument of an ``Optional[X]`` alias."""
    [it] = [a for a in ta.get_args(spec) if a not in (None, type(None))]
    return it
296
+
297
+
298
def deep_subclasses(cls: ta.Type[T]) -> ta.Iterator[ta.Type[T]]:
    """Yield every direct and transitive subclass of *cls*, depth-first, without duplicates."""
    seen: ta.Set[type] = set()
    stack = list(reversed(cls.__subclasses__()))
    while stack:
        c = stack.pop()
        if c in seen:
            continue
        seen.add(c)
        yield c
        stack.extend(reversed(c.__subclasses__()))
308
+
309
+
310
+ ########################################
311
+ # ../../../../../omlish/lite/strings.py
312
+
313
+
314
def camel_case(name: str, lower: bool = False) -> str:
    """Convert a ``snake_case`` *name* to CamelCase (or camelCase when *lower* is true)."""
    if not name:
        return ''
    out = ''.join(part.capitalize() for part in name.split('_'))
    if lower:
        out = out[0].lower() + out[1:]
    return out
321
+
322
+
323
def snake_case(name: str) -> str:
    """Convert a CamelCase/mixedCase *name* to snake_case."""
    cut_points = [i for i, ch in enumerate(name) if ch.isupper()]
    segments = [
        name[a:b].lower()
        for a, b in zip([None, *cut_points], [*cut_points, None])
    ]
    return '_'.join(segments).strip('_')
326
+
327
+
328
def is_dunder(name: str) -> bool:
    """True for dunder names like ``__init__``: double underscores at both ends, no extra adjacent underscore, length > 4."""
    return (
        len(name) > 4 and
        name[:2] == name[-2:] == '__' and
        name[2:3] != '_' and
        name[-3:-2] != '_'
    )
335
+
336
+
337
def is_sunder(name: str) -> bool:
    """True for sunder names like ``_x_``: a single underscore at both ends.

    The length check runs first and ends are examined via slicing so that
    empty strings return False instead of raising IndexError (the previous
    version indexed ``name[0]`` unconditionally). Behavior for all non-empty
    inputs is unchanged.
    """
    return (
        len(name) > 2 and
        name[:1] == name[-1:] == '_' and
        name[1:2] != '_' and
        name[-2:-1] != '_'
    )
344
+
345
+
346
def attr_repr(obj: ta.Any, *attrs: str) -> str:
    """Build a ``TypeName(a=..., b=...)`` repr from the named attributes of *obj*."""
    body = ', '.join(f'{a}={getattr(obj, a)!r}' for a in attrs)
    return f'{type(obj).__name__}({body})'
348
+
349
+
350
+ ########################################
351
+ # ../../auth.py
352
+ """
353
+ https://docs.aws.amazon.com/IAM/latest/UserGuide/create-signed-request.html
354
+
355
+ TODO:
356
+ - https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-streaming.html
357
+ - boto / s3transfer upload_fileobj doesn't stream either lol - eagerly calcs Content-MD5
358
+ - sts tokens
359
+ - !! fix canonical_qs - sort params
360
+ - secrets
361
+ """
362
+
363
+
364
+ ##
365
+
366
+
367
class AwsSigner:
    """Base class for AWS request signers.

    Holds the credentials plus the region/service scope; subclasses (e.g.
    ``V4AwsSigner``) implement the actual signing algorithm using the shared
    hashing and HTTP-header helpers defined here.
    """

    def __init__(
            self,
            creds: 'AwsSigner.Credentials',
            region_name: str,
            service_name: str,
    ) -> None:
        super().__init__()
        self._creds = creds
        self._region_name = region_name
        self._service_name = service_name

    #

    @dc.dataclass(frozen=True)
    class Credentials:
        access_key_id: str
        secret_access_key: str = dc.field(repr=False)  # repr=False keeps the secret out of logs/tracebacks

    @dc.dataclass(frozen=True)
    class Request:
        method: str
        url: str
        headers: ta.Mapping[str, ta.Sequence[str]] = dc.field(default_factory=dict)
        payload: bytes = b''

    #

    # Timestamp format required by AWS ('YYYYMMDDTHHMMSSZ', always UTC).
    ISO8601 = '%Y%m%dT%H%M%SZ'

    #

    @staticmethod
    def _host_from_url(url: str) -> str:
        # Reproduce the Host header value: hostname plus ':port' only when the
        # port differs from the scheme's default.
        url_parts = urllib.parse.urlsplit(url)
        host = check_non_empty_str(url_parts.hostname)
        default_ports = {
            'http': 80,
            'https': 443,
        }
        if url_parts.port is not None:
            if url_parts.port != default_ports.get(url_parts.scheme):
                host = '%s:%d' % (host, url_parts.port)
        return host

    @staticmethod
    def _lower_case_http_map(d: ta.Mapping[str, ta.Sequence[str]]) -> ta.Mapping[str, ta.Sequence[str]]:
        # Lower-case header names, merging values of keys that collide after
        # lower-casing. A bare str value is rejected (it would iterate per-char).
        o: ta.Dict[str, ta.List[str]] = {}
        for k, vs in d.items():
            o.setdefault(k.lower(), []).extend(check_not_isinstance(vs, str))
        return o

    #

    @staticmethod
    def _as_bytes(data: ta.Union[str, bytes]) -> bytes:
        return data if isinstance(data, bytes) else data.encode('utf-8')

    @staticmethod
    def _sha256(data: ta.Union[str, bytes]) -> str:
        # Hex digest, as required for canonical request / payload hashes.
        return hashlib.sha256(AwsSigner._as_bytes(data)).hexdigest()

    @staticmethod
    def _sha256_sign(key: bytes, msg: ta.Union[str, bytes]) -> bytes:
        # Raw HMAC-SHA256 digest (used for the chained key-derivation steps).
        return hmac.new(key, AwsSigner._as_bytes(msg), hashlib.sha256).digest()

    @staticmethod
    def _sha256_sign_hex(key: bytes, msg: ta.Union[str, bytes]) -> str:
        # Hex HMAC-SHA256 digest (used for the final signature).
        return hmac.new(key, AwsSigner._as_bytes(msg), hashlib.sha256).hexdigest()

    # SHA-256 of an empty payload; assigned immediately after the class body
    # (staticmethods can't be invoked during class creation).
    _EMPTY_SHA256: str

    #

    # Headers that must never participate in the signature.
    _SIGNED_HEADERS_BLACKLIST = frozenset([
        'authorization',
        'expect',
        'user-agent',
        'x-amzn-trace-id',
    ])

    def _validate_request(self, req: Request) -> None:
        # Signing assumes an upper-case method and whitespace-trimmed header
        # names/values; fail fast rather than produce a bad signature.
        check_non_empty_str(req.method)
        check_equal(req.method.upper(), req.method)
        for k, vs in req.headers.items():
            check_equal(k.strip(), k)
            for v in vs:
                check_equal(v.strip(), v)
455
+
456
+
457
# Precompute the empty-payload hash once at import time.
AwsSigner._EMPTY_SHA256 = AwsSigner._sha256(b'')  # noqa
458
+
459
+
460
+ ##
461
+
462
+
463
class V4AwsSigner(AwsSigner):
    """AWS Signature Version 4 signer.

    Implements the canonical-request / string-to-sign / derived-key scheme
    described in the AWS SigV4 documentation.
    """

    def sign(
            self,
            req: AwsSigner.Request,
            *,
            sign_payload: bool = False,
            utcnow: ta.Optional[datetime.datetime] = None,
    ) -> ta.Mapping[str, ta.Sequence[str]]:
        """Compute SigV4 headers for *req* without mutating it.

        Args:
            req: the request to sign (method must be upper-case, headers trimmed).
            sign_payload: also emit/sign an ``x-amz-content-sha256`` header.
            utcnow: injectable clock for deterministic tests; defaults to now(UTC).

        Returns:
            The headers to add to the outgoing request: ``Authorization``,
            ``X-Amz-Date``, and optionally ``X-Amz-Content-SHA256``.
        """
        self._validate_request(req)

        #

        if utcnow is None:
            utcnow = datetime.datetime.now(tz=datetime.timezone.utc)  # noqa
        req_dt = utcnow.strftime(self.ISO8601)

        #

        # NOTE: the query string is used as-is; per the module TODO it is not
        # yet canonicalized (SigV4 requires sorted, re-encoded params).
        parsed_url = urllib.parse.urlsplit(req.url)
        canon_uri = parsed_url.path
        canon_qs = parsed_url.query

        #

        # Lower-case header names, drop blacklisted ones, and force host and
        # x-amz-date to participate in the signature.
        headers_to_sign: ta.Dict[str, ta.List[str]] = {
            k: list(v)
            for k, v in self._lower_case_http_map(req.headers).items()
            if k not in self._SIGNED_HEADERS_BLACKLIST
        }

        if 'host' not in headers_to_sign:
            headers_to_sign['host'] = [self._host_from_url(req.url)]

        headers_to_sign['x-amz-date'] = [req_dt]

        hashed_payload = self._sha256(req.payload) if req.payload else self._EMPTY_SHA256
        if sign_payload:
            headers_to_sign['x-amz-content-sha256'] = [hashed_payload]

        # Canonical headers: sorted by name, 'name:v1,v2\n' per header.
        sorted_header_names = sorted(headers_to_sign)
        canon_headers = ''.join([
            ':'.join((k, ','.join(headers_to_sign[k]))) + '\n'
            for k in sorted_header_names
        ])
        signed_headers = ';'.join(sorted_header_names)

        #

        # Step 1: the canonical request.
        canon_req = '\n'.join([
            req.method,
            canon_uri,
            canon_qs,
            canon_headers,
            signed_headers,
            hashed_payload,
        ])

        #

        # Step 2: the string to sign, scoped to date/region/service.
        algorithm = 'AWS4-HMAC-SHA256'
        scope_parts = [
            req_dt[:8],
            self._region_name,
            self._service_name,
            'aws4_request',
        ]
        scope = '/'.join(scope_parts)
        hashed_canon_req = self._sha256(canon_req)
        string_to_sign = '\n'.join([
            algorithm,
            req_dt,
            scope,
            hashed_canon_req,
        ])

        #

        # Step 3: derive the signing key via the chained-HMAC key schedule.
        key = self._creds.secret_access_key
        key_date = self._sha256_sign(f'AWS4{key}'.encode('utf-8'), req_dt[:8])  # noqa
        key_region = self._sha256_sign(key_date, self._region_name)
        key_service = self._sha256_sign(key_region, self._service_name)
        key_signing = self._sha256_sign(key_service, 'aws4_request')
        sig = self._sha256_sign_hex(key_signing, string_to_sign)

        #

        # Step 4: assemble the Authorization header.
        cred_scope = '/'.join([
            self._creds.access_key_id,
            *scope_parts,
        ])
        auth = f'{algorithm} ' + ', '.join([
            f'Credential={cred_scope}',
            f'SignedHeaders={signed_headers}',
            f'Signature={sig}',
        ])

        #

        out = {
            'Authorization': [auth],
            'X-Amz-Date': [req_dt],
        }
        if sign_payload:
            out['X-Amz-Content-SHA256'] = [hashed_payload]
        return out
568
+
569
+
570
+ ########################################
571
+ # ../../dataclasses.py
572
+
573
+
574
class AwsDataclass:
    """Mixin giving dataclasses to/from-AWS-JSON conversion via cached per-class metadata."""

    class Raw(dict):
        # Catch-all field type: on from_aws, a field annotated Raw receives
        # the original AWS mapping verbatim.
        pass

    #

    # Per-class metadata cache slot, populated lazily by _get_aws_meta.
    _aws_meta: ta.ClassVar[ta.Optional['AwsDataclassMeta']] = None

    @classmethod
    def _get_aws_meta(cls) -> 'AwsDataclassMeta':
        # Look only in this class' own __dict__ so each subclass builds its
        # own metadata rather than inheriting a parent's.
        # NOTE(review): calling this on AwsDataclass itself would return the
        # None placeholder from the base __dict__ — presumably only concrete
        # subclasses ever call it; confirm with callers.
        try:
            return cls.__dict__['_aws_meta']
        except KeyError:
            pass
        ret = cls._aws_meta = AwsDataclassMeta(cls)
        return ret

    #

    def to_aws(self) -> ta.Mapping[str, ta.Any]:
        """Marshal this dataclass into an AWS-style (camelCase-keyed) mapping."""
        return self._get_aws_meta().converters().d2a(self)

    @classmethod
    def from_aws(cls, v: ta.Mapping[str, ta.Any]) -> 'AwsDataclass':
        """Build an instance of *cls* from an AWS-style mapping."""
        return cls._get_aws_meta().converters().a2d(v)
599
+
600
+
601
@dc.dataclass(frozen=True)
class AwsDataclassMeta:
    """Reflection cache for one AwsDataclass subclass.

    Inspects the dataclass fields once and builds a pair of converter
    closures translating between the dataclass (snake_case) and AWS
    (camelCase) representations.
    """

    cls: ta.Type['AwsDataclass']

    #

    class Field(ta.NamedTuple):
        d_name: str  # dataclass (snake_case) field name
        a_name: str  # AWS (camelCase) key name
        is_opt: bool  # annotation was Optional[...]
        is_seq: bool  # annotation was Sequence[...]
        dc_cls: ta.Optional[ta.Type['AwsDataclass']]  # nested AwsDataclass type, if any

    @cached_nullary
    def fields(self) -> ta.Sequence[Field]:
        # NOTE(review): f.type is treated as an actual type object — this
        # assumes annotations are not stringified (no PEP 563); confirm.
        fs = []
        for f in dc.fields(self.cls):  # type: ignore  # noqa
            d_name = f.name
            a_name = camel_case(d_name, lower=True)

            is_opt = False
            is_seq = False
            dc_cls = None

            c = f.type
            if c is AwsDataclass.Raw:
                # Raw catch-all fields are handled separately in converters().
                continue

            # Peel Optional[...] then Sequence[...], in that order.
            if is_optional_alias(c):
                is_opt = True
                c = get_optional_alias_arg(c)

            if is_generic_alias(c) and ta.get_origin(c) is collections.abc.Sequence:
                is_seq = True
                [c] = ta.get_args(c)

            # Any other generic (e.g. Mapping) is unsupported.
            if is_generic_alias(c):
                raise TypeError(c)

            if isinstance(c, type) and issubclass(c, AwsDataclass):
                dc_cls = c

            fs.append(AwsDataclassMeta.Field(
                d_name=d_name,
                a_name=a_name,
                is_opt=is_opt,
                is_seq=is_seq,
                dc_cls=dc_cls,
            ))

        return fs

    #

    class Converters(ta.NamedTuple):
        d2a: ta.Callable  # dataclass instance -> AWS mapping
        a2d: ta.Callable  # AWS mapping -> dataclass instance

    @cached_nullary
    def converters(self) -> Converters:
        # Find the (at most one) Raw field, which receives the original AWS
        # mapping verbatim on a2d.
        for df in dc.fields(self.cls):  # type: ignore  # noqa
            c = df.type

            if is_optional_alias(c):
                c = get_optional_alias_arg(c)

            if c is AwsDataclass.Raw:
                rf = df.name
                break

        else:
            rf = None

        # Pair each field with its nested converters (None for scalar fields).
        fs = [
            (f, f.dc_cls._get_aws_meta().converters() if f.dc_cls is not None else None)  # noqa
            for f in self.fields()
        ]

        def d2a(o):
            # dataclass -> AWS dict; None-valued fields are omitted entirely.
            dct = {}
            for f, cs in fs:
                x = getattr(o, f.d_name)
                if x is None:
                    continue
                if cs is not None:
                    if f.is_seq:
                        x = list(map(cs.d2a, x))
                    else:
                        x = cs.d2a(x)
                dct[f.a_name] = x
            return dct

        def a2d(v):
            # AWS dict -> dataclass; missing/None AWS keys are skipped so the
            # dataclass defaults apply.
            dct = {}
            for f, cs in fs:
                x = v.get(f.a_name)
                if x is None:
                    continue
                if cs is not None:
                    if f.is_seq:
                        x = list(map(cs.a2d, x))
                    else:
                        x = cs.a2d(x)
                dct[f.d_name] = x
            if rf is not None:
                dct[rf] = self.cls.Raw(v)
            return self.cls(**dct)

        return AwsDataclassMeta.Converters(d2a, a2d)
710
+
711
+
712
+ ########################################
713
+ # ../../../../../omlish/lite/io.py
714
+
715
+
716
class DelimitingBuffer:
    """Incremental splitter: feed arbitrary byte chunks, get back delimited segments.

    https://github.com/python-trio/trio/issues/796 :|

    Items yielded by :meth:`feed` are either complete ``bytes`` segments (one
    per delimiter found) or :class:`Incomplete` wrappers for data flushed
    without a terminating delimiter (the EOF remainder, or ``max_size``
    overflow).
    """

    #

    class Error(Exception):
        def __init__(self, buffer: 'DelimitingBuffer') -> None:
            super().__init__(buffer)
            self.buffer = buffer

        def __repr__(self) -> str:
            return attr_repr(self, 'buffer')

    class ClosedError(Error):
        # Raised when the buffer is used after an empty feed() closed it.
        pass

    #

    DEFAULT_DELIMITERS: bytes = b'\n'

    def __init__(
            self,
            delimiters: ta.Iterable[int] = DEFAULT_DELIMITERS,
            *,
            keep_ends: bool = False,
            max_size: ta.Optional[int] = None,
    ) -> None:
        """
        Args:
            delimiters: byte values (ints; iterating bytes yields ints) that end a segment.
            keep_ends: when True, the delimiter byte stays on the yielded segment.
            max_size: cap on buffered bytes; on overflow a full chunk is flushed as Incomplete.
        """
        super().__init__()

        self._delimiters = frozenset(check_isinstance(d, int) for d in delimiters)
        self._keep_ends = keep_ends
        self._max_size = max_size

        # Becomes None once closed (after an empty feed()).
        self._buf: ta.Optional[io.BytesIO] = io.BytesIO()

    #

    @property
    def is_closed(self) -> bool:
        return self._buf is None

    def tell(self) -> int:
        """Number of bytes currently buffered (raises ClosedError when closed)."""
        if (buf := self._buf) is None:
            raise self.ClosedError(self)
        return buf.tell()

    def peek(self) -> bytes:
        """Currently buffered bytes without consuming them (raises ClosedError when closed)."""
        if (buf := self._buf) is None:
            raise self.ClosedError(self)
        return buf.getvalue()

    def _find_delim(self, data: ta.Union[bytes, bytearray], i: int) -> ta.Optional[int]:
        # Index of the earliest delimiter at or after position i, else None.
        r = None  # type: int | None
        for d in self._delimiters:
            if (p := data.find(d, i)) >= 0:
                if r is None or p < r:
                    r = p
        return r

    def _append_and_reset(self, chunk: bytes) -> bytes:
        # Prepend any buffered leftover to *chunk* and clear the buffer.
        buf = check_not_none(self._buf)
        if not buf.tell():
            return chunk

        buf.write(chunk)
        ret = buf.getvalue()
        buf.seek(0)
        buf.truncate()
        return ret

    class Incomplete(ta.NamedTuple):
        # A segment yielded without having seen its delimiter.
        b: bytes

    def feed(self, data: ta.Union[bytes, bytearray]) -> ta.Generator[ta.Union[bytes, Incomplete], None, None]:
        """Feed a chunk, yielding completed segments; an empty *data* signals EOF and closes the buffer."""
        if (buf := self._buf) is None:
            raise self.ClosedError(self)

        if not data:
            # EOF: close, then flush whatever is buffered as Incomplete.
            self._buf = None

            if buf.tell():
                yield self.Incomplete(buf.getvalue())

            return

        l = len(data)
        i = 0
        while i < l:
            if (p := self._find_delim(data, i)) is None:
                break

            # n: resume position (just past the delimiter); p: slice end,
            # bumped to include the delimiter when keep_ends is set.
            n = p + 1
            if self._keep_ends:
                p = n

            yield self._append_and_reset(data[i:p])

            i = n

        if i >= l:
            return

        # Trailing bytes with no delimiter: buffer them unbounded...
        if self._max_size is None:
            buf.write(data[i:])
            return

        # ...or, with max_size set, flush capacity-sized chunks as Incomplete
        # and keep only what fits.
        while i < l:
            remaining_data_len = l - i
            remaining_buf_capacity = self._max_size - buf.tell()

            if remaining_data_len < remaining_buf_capacity:
                buf.write(data[i:])
                return

            p = i + remaining_buf_capacity
            yield self.Incomplete(self._append_and_reset(data[i:p]))
            i = p
835
+
836
+
837
+ ########################################
838
+ # ../../../../../omlish/lite/logs.py
839
+ """
840
+ TODO:
841
+ - translate json keys
842
+ - debug
843
+ """
844
+
845
+
846
# Module-level logger shared by this amalgamated script.
log = logging.getLogger(__name__)
847
+
848
+
849
+ ##
850
+
851
+
852
class TidLogFilter(logging.Filter):
    """logging.Filter that stamps every record with the native OS thread id.

    It never suppresses records (always returns True); its only job is to
    attach the ``tid`` attribute for use by formatters.
    """

    def filter(self, record):
        tid = threading.get_native_id()
        record.tid = tid
        return True
857
+
858
+
859
+ ##
860
+
861
+
862
class JsonLogFormatter(logging.Formatter):
    """Formatter emitting each record as one compact JSON object."""

    # LogRecord attributes to serialize. The bool marks attributes that are
    # omitted when their value is None (e.g. exc_info on ordinary records).
    KEYS: ta.Mapping[str, bool] = {
        'name': False,
        'msg': False,
        'args': False,
        'levelname': False,
        'levelno': False,
        'pathname': False,
        'filename': False,
        'module': False,
        'exc_info': True,
        'exc_text': True,
        'stack_info': True,
        'lineno': False,
        'funcName': False,
        'created': False,
        'msecs': False,
        'relativeCreated': False,
        'thread': False,
        'threadName': False,
        'processName': False,
        'process': False,
    }

    def format(self, record: logging.LogRecord) -> str:
        # The inner single-element for-clause is just a way to bind v once
        # per key inside the comprehension.
        dct = {
            k: v
            for k, o in self.KEYS.items()
            for v in [getattr(record, k)]
            if not (o and v is None)
        }
        return json_dumps_compact(dct)
895
+
896
+
897
+ ##
898
+
899
+
900
# (key, %-style fragment) pairs composing the standard log line; kept as
# pairs so callers can filter parts by key before joining them.
STANDARD_LOG_FORMAT_PARTS = [
    ('asctime', '%(asctime)-15s'),
    ('process', 'pid=%(process)-6s'),
    ('thread', 'tid=%(thread)x'),
    ('levelname', '%(levelname)s'),
    ('name', '%(name)s'),
    ('separator', '::'),
    ('message', '%(message)s'),
]
909
+
910
+
911
class StandardLogFormatter(logging.Formatter):
    """Human-readable formatter with millisecond-resolution timestamps."""

    @staticmethod
    def build_log_format(parts: ta.Iterable[ta.Tuple[str, str]]) -> str:
        """Join the fragments of (key, fragment) *parts* into one %-style format string."""
        return ' '.join(v for k, v in parts)

    # Timestamps rendered via datetime (local time), replacing the
    # time.localtime-based default converter.
    converter = datetime.datetime.fromtimestamp  # type: ignore

    def formatTime(self, record, datefmt=None):
        ct = self.converter(record.created)  # type: ignore
        if datefmt:
            return ct.strftime(datefmt)  # noqa
        else:
            # Default: 'YYYY-MM-DD HH:MM:SS.mmm'.
            t = ct.strftime("%Y-%m-%d %H:%M:%S")  # noqa
            return '%s.%03d' % (t, record.msecs)
926
+
927
+
928
+ ##
929
+
930
+
931
class ProxyLogFilterer(logging.Filterer):
    """Filterer that forwards every operation to a wrapped underlying Filterer."""

    def __init__(self, underlying: logging.Filterer) -> None:  # noqa
        # Deliberately skips super().__init__(): all filter state lives on
        # the underlying object.
        self._underlying = underlying

    @property
    def underlying(self) -> logging.Filterer:
        return self._underlying

    @property
    def filters(self):
        return self._underlying.filters

    @filters.setter
    def filters(self, filters):
        self._underlying.filters = filters

    def addFilter(self, filter):  # noqa
        self._underlying.addFilter(filter)

    def removeFilter(self, filter):  # noqa
        self._underlying.removeFilter(filter)

    def filter(self, record):
        return self._underlying.filter(record)
955
+
956
+
957
class ProxyLogHandler(ProxyLogFilterer, logging.Handler):
    """Handler that forwards everything (name, level, formatter, locking, emit) to a wrapped Handler."""

    def __init__(self, underlying: logging.Handler) -> None:  # noqa
        # Only the filterer side is initialized; logging.Handler.__init__ is
        # intentionally skipped so no independent handler state is created.
        ProxyLogFilterer.__init__(self, underlying)

    _underlying: logging.Handler

    @property
    def underlying(self) -> logging.Handler:
        return self._underlying

    def get_name(self):
        return self._underlying.get_name()

    def set_name(self, name):
        self._underlying.set_name(name)

    @property
    def name(self):
        return self._underlying.name

    @property
    def level(self):
        return self._underlying.level

    @level.setter
    def level(self, level):
        self._underlying.level = level

    @property
    def formatter(self):
        return self._underlying.formatter

    @formatter.setter
    def formatter(self, formatter):
        self._underlying.formatter = formatter

    def createLock(self):
        self._underlying.createLock()

    def acquire(self):
        self._underlying.acquire()

    def release(self):
        self._underlying.release()

    def setLevel(self, level):
        self._underlying.setLevel(level)

    def format(self, record):
        return self._underlying.format(record)

    def emit(self, record):
        self._underlying.emit(record)

    def handle(self, record):
        return self._underlying.handle(record)

    def setFormatter(self, fmt):
        self._underlying.setFormatter(fmt)

    def flush(self):
        self._underlying.flush()

    def close(self):
        self._underlying.close()

    def handleError(self, record):
        self._underlying.handleError(record)
1025
+
1026
+
1027
+ ##
1028
+
1029
+
1030
class StandardLogHandler(ProxyLogHandler):
    # Marker subclass: configure_standard_logging checks isinstance() against
    # this type to avoid installing duplicate handlers on repeat calls.
    pass
1032
+
1033
+
1034
+ ##
1035
+
1036
+
1037
@contextlib.contextmanager
def _locking_logging_module_lock() -> ta.Iterator[None]:
    """Hold the logging module's global lock across Python versions.

    Older CPythons expose _acquireLock/_releaseLock; newer ones expose the
    underlying _lock object directly (see the linked commit). Raises if
    neither private API is found.
    """
    if hasattr(logging, '_acquireLock'):
        logging._acquireLock()  # noqa
        try:
            yield
        finally:
            logging._releaseLock()  # type: ignore  # noqa

    elif hasattr(logging, '_lock'):
        # https://github.com/python/cpython/commit/74723e11109a320e628898817ab449b3dad9ee96
        with logging._lock:  # noqa
            yield

    else:
        raise Exception("Can't find lock in logging module")
1053
+
1054
+
1055
def configure_standard_logging(
        level: ta.Union[int, str] = logging.INFO,
        *,
        json: bool = False,
        target: ta.Optional[logging.Logger] = None,
        force: bool = False,
) -> ta.Optional[StandardLogHandler]:
    """Install the standard stream handler (plain or JSON) on *target*.

    Args:
        level: level applied to the target logger.
        json: emit compact JSON lines instead of the human-readable format.
        target: logger to configure; defaults to the root logger.
        force: install even if a StandardLogHandler is already present.

    Returns:
        The newly installed handler (wrapped in StandardLogHandler), or None
        when one was already present and *force* is False.
    """
    with _locking_logging_module_lock():
        if target is None:
            target = logging.root

        #

        # Idempotence: bail if this logger was already configured by us.
        if not force:
            if any(isinstance(h, StandardLogHandler) for h in list(target.handlers)):
                return None

        #

        handler = logging.StreamHandler()

        #

        formatter: logging.Formatter
        if json:
            formatter = JsonLogFormatter()
        else:
            formatter = StandardLogFormatter(StandardLogFormatter.build_log_format(STANDARD_LOG_FORMAT_PARTS))
        handler.setFormatter(formatter)

        #

        # Make thread ids available to the format strings above.
        handler.addFilter(TidLogFilter())

        #

        target.addHandler(handler)

        #

        if level is not None:
            target.setLevel(level)

        #

        # Wrapped so later calls can recognize this handler (see above).
        return StandardLogHandler(handler)
1101
+
1102
+
1103
+ ########################################
1104
+ # ../../../../../omlish/lite/marshal.py
1105
+ """
1106
+ TODO:
1107
+ - pickle stdlib objs? have to pin to 3.8 pickle protocol, will be cross-version
1108
+ - nonstrict toggle
1109
+ """
1110
+
1111
+
1112
+ ##
1113
+
1114
+
1115
class ObjMarshaler(abc.ABC):
    """Strategy interface: convert one category of object to/from JSON-compatible values."""

    @abc.abstractmethod
    def marshal(self, o: ta.Any) -> ta.Any:
        raise NotImplementedError

    @abc.abstractmethod
    def unmarshal(self, o: ta.Any) -> ta.Any:
        raise NotImplementedError
1123
+
1124
+
1125
class NopObjMarshaler(ObjMarshaler):
    """Identity marshaler for values that are already JSON-representable (e.g. None)."""

    def marshal(self, o: ta.Any) -> ta.Any:
        return o

    def unmarshal(self, o: ta.Any) -> ta.Any:
        return o
1131
+
1132
+
1133
@dc.dataclass()
class ProxyObjMarshaler(ObjMarshaler):
    """Late-bound marshaler: ``m`` is filled in after construction, enabling recursive types."""

    m: ta.Optional[ObjMarshaler] = None

    def marshal(self, o: ta.Any) -> ta.Any:
        return check_not_none(self.m).marshal(o)

    def unmarshal(self, o: ta.Any) -> ta.Any:
        return check_not_none(self.m).unmarshal(o)
1142
+
1143
+
1144
@dc.dataclass(frozen=True)
class CastObjMarshaler(ObjMarshaler):
    """Marshals by passing through; unmarshals by casting to ``ty`` (used for int/float/str/bool)."""

    ty: type

    def marshal(self, o: ta.Any) -> ta.Any:
        return o

    def unmarshal(self, o: ta.Any) -> ta.Any:
        return self.ty(o)
1153
+
1154
+
1155
class DynamicObjMarshaler(ObjMarshaler):
    """Marshals by re-dispatching on the value's runtime type; unmarshalling is a no-op."""

    def marshal(self, o: ta.Any) -> ta.Any:
        return marshal_obj(o)

    def unmarshal(self, o: ta.Any) -> ta.Any:
        return o
1161
+
1162
+
1163
@dc.dataclass(frozen=True)
class Base64ObjMarshaler(ObjMarshaler):
    """Represents bytes-like values as base64 ASCII strings; rebuilds as ``ty``."""

    ty: type

    def marshal(self, o: ta.Any) -> ta.Any:
        return base64.b64encode(o).decode('ascii')

    def unmarshal(self, o: ta.Any) -> ta.Any:
        return self.ty(base64.b64decode(o))
1172
+
1173
+
1174
@dc.dataclass(frozen=True)
class EnumObjMarshaler(ObjMarshaler):
    """Represents enum members by name."""

    ty: type

    def marshal(self, o: ta.Any) -> ta.Any:
        return o.name

    def unmarshal(self, o: ta.Any) -> ta.Any:
        return self.ty.__members__[o]  # type: ignore
1183
+
1184
+
1185
@dc.dataclass(frozen=True)
class OptionalObjMarshaler(ObjMarshaler):
    """Wraps another marshaler, passing None through untouched in both directions."""

    item: ObjMarshaler

    def marshal(self, o: ta.Any) -> ta.Any:
        if o is None:
            return None
        return self.item.marshal(o)

    def unmarshal(self, o: ta.Any) -> ta.Any:
        if o is None:
            return None
        return self.item.unmarshal(o)
1198
+
1199
+
1200
@dc.dataclass(frozen=True)
class MappingObjMarshaler(ObjMarshaler):
    """Marshals mappings entry-wise with separate key/value marshalers; rebuilds as ``ty``."""

    ty: type
    km: ObjMarshaler
    vm: ObjMarshaler

    def marshal(self, o: ta.Any) -> ta.Any:
        return {self.km.marshal(k): self.vm.marshal(v) for k, v in o.items()}

    def unmarshal(self, o: ta.Any) -> ta.Any:
        return self.ty((self.km.unmarshal(k), self.vm.unmarshal(v)) for k, v in o.items())
1211
+
1212
+
1213
@dc.dataclass(frozen=True)
class IterableObjMarshaler(ObjMarshaler):
    """Marshals iterables element-wise to lists; rebuilds as ``ty`` (list, tuple, set, ...)."""

    ty: type
    item: ObjMarshaler

    def marshal(self, o: ta.Any) -> ta.Any:
        return [self.item.marshal(e) for e in o]

    def unmarshal(self, o: ta.Any) -> ta.Any:
        return self.ty(self.item.unmarshal(e) for e in o)
1223
+
1224
+
1225
@dc.dataclass(frozen=True)
class DataclassObjMarshaler(ObjMarshaler):
    """Marshals a dataclass to/from a plain dict using one sub-marshaler per field.

    ``nonstrict`` signals tolerance of input mappings carrying keys the
    dataclass does not declare; unknown keys are ignored on unmarshal.
    """

    ty: type
    fs: ta.Mapping[str, ObjMarshaler]
    nonstrict: bool = False

    def marshal(self, o: ta.Any) -> ta.Any:
        return {k: m.marshal(getattr(o, k)) for k, m in self.fs.items()}

    def unmarshal(self, o: ta.Any) -> ta.Any:
        # Only known field keys are consumed. The previous filter
        # (`self.nonstrict or k in self.fs`) let unknown keys past the filter
        # whenever nonstrict was True and then crashed with KeyError on the
        # `self.fs[k]` lookup; strict-mode behavior is unchanged.
        return self.ty(**{
            k: self.fs[k].unmarshal(v)
            for k, v in o.items()
            if k in self.fs
        })
1236
+
1237
+
1238
@dc.dataclass(frozen=True)
class PolymorphicObjMarshaler(ObjMarshaler):
    """Dispatches over a closed set of subtypes, tagging payloads by impl name.

    The marshalled form is a single-entry dict ``{tag: payload}``.
    """

    class Impl(ta.NamedTuple):
        ty: type
        tag: str
        m: ObjMarshaler

    impls_by_ty: ta.Mapping[type, Impl]
    impls_by_tag: ta.Mapping[str, Impl]

    def marshal(self, o: ta.Any) -> ta.Any:
        # Exact-type lookup: subclasses must be registered individually.
        impl = self.impls_by_ty[type(o)]
        return {impl.tag: impl.m.marshal(o)}

    def unmarshal(self, o: ta.Any) -> ta.Any:
        # Unpacking enforces the single-entry shape.
        [(t, v)] = o.items()
        impl = self.impls_by_tag[t]
        return impl.m.unmarshal(v)
1256
+
1257
+
1258
@dc.dataclass(frozen=True)
class DatetimeObjMarshaler(ObjMarshaler):
    """Represents date/time/datetime values as ISO-8601 strings via (from)isoformat."""

    ty: type

    def marshal(self, o: ta.Any) -> ta.Any:
        return o.isoformat()

    def unmarshal(self, o: ta.Any) -> ta.Any:
        return self.ty.fromisoformat(o)  # type: ignore
1267
+
1268
+
1269
class DecimalObjMarshaler(ObjMarshaler):
    """Round-trips decimal.Decimal values through their exact string form."""

    def marshal(self, o: ta.Any) -> ta.Any:
        d = check_isinstance(o, decimal.Decimal)
        return str(d)

    def unmarshal(self, v: ta.Any) -> ta.Any:
        s = check_isinstance(v, str)
        return decimal.Decimal(s)
1275
+
1276
+
1277
class FractionObjMarshaler(ObjMarshaler):
    """Round-trips fractions.Fraction values as a [numerator, denominator] pair."""

    def marshal(self, o: ta.Any) -> ta.Any:
        frac = check_isinstance(o, fractions.Fraction)
        return [frac.numerator, frac.denominator]

    def unmarshal(self, v: ta.Any) -> ta.Any:
        num, den = check_isinstance(v, list)
        return fractions.Fraction(num, den)
1285
+
1286
+
1287
class UuidObjMarshaler(ObjMarshaler):
    """Round-trips uuid.UUID values through their canonical string form."""

    def marshal(self, o: ta.Any) -> ta.Any:
        return str(o)

    def unmarshal(self, o: ta.Any) -> ta.Any:
        return uuid.UUID(o)
1293
+
1294
+
1295
# Default marshaler registry, seeded with handlers for common builtin and stdlib types.
# get_obj_marshaler() lazily extends this dict with marshalers built by _make_obj_marshaler().
_OBJ_MARSHALERS: ta.Dict[ta.Any, ObjMarshaler] = {
    **{t: NopObjMarshaler() for t in (type(None),)},
    **{t: CastObjMarshaler(t) for t in (int, float, str, bool)},
    **{t: Base64ObjMarshaler(t) for t in (bytes, bytearray)},
    **{t: IterableObjMarshaler(t, DynamicObjMarshaler()) for t in (list, tuple, set, frozenset)},
    **{t: MappingObjMarshaler(t, DynamicObjMarshaler(), DynamicObjMarshaler()) for t in (dict,)},

    ta.Any: DynamicObjMarshaler(),

    **{t: DatetimeObjMarshaler(t) for t in (datetime.date, datetime.time, datetime.datetime)},
    decimal.Decimal: DecimalObjMarshaler(),
    fractions.Fraction: FractionObjMarshaler(),
    uuid.UUID: UuidObjMarshaler(),
}

# Maps generic-alias mapping origins (e.g. the origin of ta.Mapping[K, V]) to the concrete type
# instantiated on unmarshal.
_OBJ_MARSHALER_GENERIC_MAPPING_TYPES: ta.Dict[ta.Any, type] = {
    **{t: t for t in (dict,)},
    **{t: dict for t in (collections.abc.Mapping, collections.abc.MutableMapping)},
}

# Maps generic-alias iterable origins (e.g. the origin of ta.Sequence[T]) to the concrete type
# instantiated on unmarshal.
_OBJ_MARSHALER_GENERIC_ITERABLE_TYPES: ta.Dict[ta.Any, type] = {
    **{t: t for t in (list, tuple, set, frozenset)},
    collections.abc.Set: frozenset,
    collections.abc.MutableSet: set,
    collections.abc.Sequence: tuple,
    collections.abc.MutableSequence: list,
}
1322
+
1323
+
1324
def register_opj_marshaler(ty: ta.Any, m: ObjMarshaler) -> None:
    """Registers a marshaler for a type, refusing to overwrite an existing registration.

    NOTE(review): the 'opj' spelling is a typo, but the name is public api - kept for
    backward compatibility.
    """
    if ty in _OBJ_MARSHALERS:
        raise KeyError(ty)
    _OBJ_MARSHALERS[ty] = m
1328
+
1329
+
1330
def _make_obj_marshaler(ty: ta.Any) -> ObjMarshaler:
    """Builds a marshaler for a type not already present in the registry.

    Handles, in order: direct-ABC polymorphic hierarchies, enums, dataclasses, generic mapping
    and iterable aliases, and union (Optional) aliases. Raises TypeError for anything else.
    """
    if isinstance(ty, type):
        if abc.ABC in ty.__bases__:
            # Direct ABC subclass: marshal as a tagged union over all concrete deep subclasses,
            # tagged by qualified name.
            impls = [  # type: ignore
                PolymorphicObjMarshaler.Impl(
                    ity,
                    ity.__qualname__,
                    get_obj_marshaler(ity),
                )
                for ity in deep_subclasses(ty)
                if abc.ABC not in ity.__bases__
            ]
            return PolymorphicObjMarshaler(
                {i.ty: i for i in impls},
                {i.tag: i for i in impls},
            )

        if issubclass(ty, enum.Enum):
            return EnumObjMarshaler(ty)

        if dc.is_dataclass(ty):
            return DataclassObjMarshaler(
                ty,
                # NOTE(review): f.type may be a string under deferred annotations (PEP 563) -
                # this assumes resolved (non-string) annotations. Confirm for callers.
                {f.name: get_obj_marshaler(f.type) for f in dc.fields(ty)},
            )

    if is_generic_alias(ty):
        try:
            mt = _OBJ_MARSHALER_GENERIC_MAPPING_TYPES[ta.get_origin(ty)]
        except KeyError:
            pass
        else:
            k, v = ta.get_args(ty)
            return MappingObjMarshaler(mt, get_obj_marshaler(k), get_obj_marshaler(v))

        try:
            st = _OBJ_MARSHALER_GENERIC_ITERABLE_TYPES[ta.get_origin(ty)]
        except KeyError:
            pass
        else:
            [e] = ta.get_args(ty)
            return IterableObjMarshaler(st, get_obj_marshaler(e))

    if is_union_alias(ty):
        return OptionalObjMarshaler(get_obj_marshaler(get_optional_alias_arg(ty)))

    raise TypeError(ty)
1377
+
1378
+
1379
def get_obj_marshaler(ty: ta.Any) -> ObjMarshaler:
    """Returns the marshaler for a type, building and caching it on first use.

    A ProxyObjMarshaler placeholder is registered before construction so recursive /
    self-referential types resolve; the placeholder is replaced by the real marshaler on
    success and removed again on failure.
    """
    try:
        return _OBJ_MARSHALERS[ty]
    except KeyError:
        pass

    p = ProxyObjMarshaler()
    _OBJ_MARSHALERS[ty] = p
    try:
        m = _make_obj_marshaler(ty)
    except Exception:
        # Roll back the placeholder so a later retry is possible.
        del _OBJ_MARSHALERS[ty]
        raise
    else:
        p.m = m
        _OBJ_MARSHALERS[ty] = m
        return m
1396
+
1397
+
1398
def marshal_obj(o: ta.Any, ty: ta.Any = None) -> ta.Any:
    """Marshals o using the marshaler registered for ty (defaulting to type(o))."""
    if ty is None:
        ty = type(o)
    return get_obj_marshaler(ty).marshal(o)
1400
+
1401
+
1402
def unmarshal_obj(o: ta.Any, ty: ta.Union[ta.Type[T], ta.Any]) -> T:
    """Unmarshals o into an instance of ty."""
    m = get_obj_marshaler(ty)
    return m.unmarshal(o)
1404
+
1405
+
1406
+ ########################################
1407
+ # ../../../../../omlish/lite/runtime.py
1408
+
1409
+
1410
@cached_nullary
def is_debugger_attached() -> bool:
    """Heuristically detects a pydevd (PyCharm) debugger by scanning the current call stack."""
    for frame in inspect.stack():
        if frame[1].endswith('pydevd.py'):
            return True
    return False
1413
+
1414
+
1415
# Minimum interpreter version this script supports.
REQUIRED_PYTHON_VERSION = (3, 8)


def check_runtime_version() -> None:
    """Raises OSError if the running interpreter is older than REQUIRED_PYTHON_VERSION."""
    if sys.version_info >= REQUIRED_PYTHON_VERSION:
        return
    raise OSError(
        f'Requires python {REQUIRED_PYTHON_VERSION}, got {sys.version_info} from {sys.executable}')  # noqa
1422
+
1423
+
1424
+ ########################################
1425
+ # ../journald/messages.py
1426
+
1427
+
1428
@dc.dataclass(frozen=True)
class JournalctlMessage:
    """One parsed line of `journalctl -o json` output."""

    raw: bytes  # the raw line as read from the journalctl pipe
    dct: ta.Optional[ta.Mapping[str, ta.Any]] = None  # parsed json object, or None if parsing failed
    cursor: ta.Optional[str] = None  # journal __CURSOR value, if present
    ts_us: ta.Optional[int] = None  # microseconds UTC
1434
+
1435
+
1436
class JournalctlMessageBuilder:
    """Accumulates raw bytes from a journalctl pipe and yields parsed JournalctlMessages.

    Input is split on newlines; each completed line is expected to be a json object as produced
    by `journalctl -o json`. Parse failures are logged and yield a message with dct=None.
    """

    def __init__(self) -> None:
        super().__init__()

        self._buf = DelimitingBuffer(b'\n')

    _cursor_field = '__CURSOR'
    _timestamp_field = '_SOURCE_REALTIME_TIMESTAMP'

    def _parse_timestamp(self, tsv: ta.Any) -> ta.Optional[int]:
        """Parses a journald realtime timestamp (microseconds) from str/int/float, or None."""
        if isinstance(tsv, str):
            try:
                return int(tsv)
            except ValueError:
                try:
                    return int(float(tsv))
                except ValueError:
                    log.exception('Failed to parse timestamp: %r', tsv)
        elif isinstance(tsv, (int, float)):
            return int(tsv)
        else:
            # Bug fix: this branch is not inside an exception handler, so log.exception would
            # append a spurious 'NoneType: None' traceback - log.error is the correct call here.
            log.error('Invalid timestamp: %r', tsv)
        return None

    def _make_message(self, raw: bytes) -> JournalctlMessage:
        """Parses one raw line into a JournalctlMessage, tolerating malformed input."""
        dct = None
        cursor = None
        ts = None

        try:
            dct = json.loads(raw.decode('utf-8', 'replace'))
        except Exception:  # noqa
            log.exception('Failed to parse raw message: %r', raw)
        else:
            cursor = dct.get(self._cursor_field)
            if tsv := dct.get(self._timestamp_field):
                ts = self._parse_timestamp(tsv)

        return JournalctlMessage(
            raw=raw,
            dct=dct,
            cursor=cursor,
            ts_us=ts,
        )

    def feed(self, data: bytes) -> ta.Sequence[JournalctlMessage]:
        """Feeds raw bytes in; returns one message per newline-terminated line completed."""
        ret: ta.List[JournalctlMessage] = []
        for line in self._buf.feed(data):
            ret.append(self._make_message(check_isinstance(line, bytes)))  # type: ignore
        return ret
1484
+
1485
+
1486
+ ########################################
1487
+ # ../threadworker.py
1488
+
1489
+
1490
class ThreadWorker(abc.ABC):
    """Base class for a worker running on its own thread with a shared stop event.

    Subclasses implement _run() and should call _heartbeat() periodically inside their loop,
    exiting when it returns False.
    """

    def __init__(
            self,
            *,
            stop_event: ta.Optional[threading.Event] = None,
    ) -> None:
        super().__init__()

        # A shared Event may be injected so several workers can be stopped together.
        if stop_event is None:
            stop_event = threading.Event()
        self._stop_event = stop_event

        self._thread: ta.Optional[threading.Thread] = None

        # time.time() of the most recent _heartbeat() call, for external liveness monitoring.
        self._last_heartbeat: ta.Optional[float] = None

    #

    def should_stop(self) -> bool:
        return self._stop_event.is_set()

    #

    @property
    def last_heartbeat(self) -> ta.Optional[float]:
        return self._last_heartbeat

    def _heartbeat(self) -> bool:
        """Records liveness; returns False when the worker should shut down."""
        self._last_heartbeat = time.time()

        if self.should_stop():
            log.info('Stopping: %s', self)
            return False

        return True

    #

    def is_alive(self) -> bool:
        return (thr := self._thread) is not None and thr.is_alive()

    def start(self) -> None:
        thr = threading.Thread(target=self._run)
        self._thread = thr
        thr.start()

    @abc.abstractmethod
    def _run(self) -> None:
        raise NotImplementedError

    def stop(self) -> None:
        # NOTE(review): unimplemented - presumably intended to set self._stop_event and join the
        # thread; as written callers have no way to stop a worker through this method. Confirm.
        raise NotImplementedError

    def cleanup(self) -> None:  # noqa
        pass
1545
+
1546
+
1547
+ ########################################
1548
+ # ../../logs.py
1549
+ """
1550
+ https://docs.aws.amazon.com/AmazonCloudWatchLogs/latest/APIReference/API_PutLogEvents.html :
1551
+ - The maximum batch size is 1,048,576 bytes. This size is calculated as the sum of all event messages in UTF-8, plus 26
1552
+ bytes for each log event.
1553
+ - None of the log events in the batch can be more than 2 hours in the future.
1554
+ - None of the log events in the batch can be more than 14 days in the past. Also, none of the log events can be from
1555
+ earlier than the retention period of the log group.
1556
+ - The log events in the batch must be in chronological order by their timestamp. The timestamp is the time that the
1557
+ event occurred, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC. (In AWS Tools for PowerShell
1558
+ and the AWS SDK for .NET, the timestamp is specified in .NET format: yyyy-mm-ddThh:mm:ss. For example,
1559
+ 2017-09-15T13:45:30.)
1560
+ - A batch of log events in a single request cannot span more than 24 hours. Otherwise, the operation fails.
1561
+ - Each log event can be no larger than 256 KB.
1562
+ - The maximum number of log events in a batch is 10,000.
1563
+ """
1564
+
1565
+
1566
+ ##
1567
+
1568
+
1569
@dc.dataclass(frozen=True)
class AwsLogEvent(AwsDataclass):
    """One CloudWatch Logs event as carried in a PutLogEvents request."""

    message: str
    timestamp: int  # milliseconds UTC
1573
+
1574
+
1575
@dc.dataclass(frozen=True)
class AwsPutLogEventsRequest(AwsDataclass):
    """Body of a CloudWatch Logs PutLogEvents api call."""

    log_group_name: str
    log_stream_name: str
    log_events: ta.Sequence[AwsLogEvent]
    sequence_token: ta.Optional[str] = None  # optional continuation token from a prior response
1581
+
1582
+
1583
@dc.dataclass(frozen=True)
class AwsRejectedLogEventsInfo(AwsDataclass):
    """Indices (into the submitted batch) of events rejected by PutLogEvents."""

    expired_log_event_end_index: ta.Optional[int] = None
    too_new_log_event_start_index: ta.Optional[int] = None
    too_old_log_event_end_index: ta.Optional[int] = None
1588
+
1589
+
1590
@dc.dataclass(frozen=True)
class AwsPutLogEventsResponse(AwsDataclass):
    """Parsed response of a PutLogEvents api call."""

    next_sequence_token: ta.Optional[str] = None
    rejected_log_events_info: ta.Optional[AwsRejectedLogEventsInfo] = None

    raw: ta.Optional[AwsDataclass.Raw] = None  # the full unconverted response payload
1596
+
1597
+
1598
+ ##
1599
+
1600
+
1601
class AwsLogMessagePoster:
    """Builds signed PutLogEvents http posts for a fixed CloudWatch log group/stream.

    Performs no I/O itself: feed() returns fully-signed Post objects for the caller to send.

    TODO:
     - max_items
     - max_bytes - manually build body
     - flush_interval
     - !! sort by timestamp
    """

    DEFAULT_URL = 'https://logs.{region_name}.amazonaws.com/'  # noqa

    DEFAULT_SERVICE_NAME = 'logs'

    DEFAULT_TARGET = 'Logs_20140328.PutLogEvents'
    DEFAULT_CONTENT_TYPE = 'application/x-amz-json-1.1'

    DEFAULT_HEADERS: ta.Mapping[str, str] = {
        'X-Amz-Target': DEFAULT_TARGET,
        'Content-Type': DEFAULT_CONTENT_TYPE,
    }

    def __init__(
            self,
            log_group_name: str,
            log_stream_name: str,
            region_name: str,
            credentials: AwsSigner.Credentials,

            url: ta.Optional[str] = None,
            service_name: str = DEFAULT_SERVICE_NAME,
            headers: ta.Optional[ta.Mapping[str, str]] = None,
            extra_headers: ta.Optional[ta.Mapping[str, str]] = None,
    ) -> None:
        super().__init__()

        self._log_group_name = check_non_empty_str(log_group_name)
        self._log_stream_name = check_non_empty_str(log_stream_name)

        if url is None:
            url = self.DEFAULT_URL.format(region_name=region_name)
        self._url = url

        if headers is None:
            headers = self.DEFAULT_HEADERS
        if extra_headers is not None:
            headers = {**headers, **extra_headers}
        # Header values are stored as single-element lists - the shape V4AwsSigner.Request takes.
        self._headers = {k: [v] for k, v in headers.items()}

        self._signer = V4AwsSigner(
            credentials,
            region_name,
            service_name,
        )

    #

    @dc.dataclass(frozen=True)
    class Message:
        message: str
        ts_ms: int  # milliseconds UTC

    @dc.dataclass(frozen=True)
    class Post:
        # A ready-to-send signed http request.
        url: str
        headers: ta.Mapping[str, str]
        data: bytes

    def feed(self, messages: ta.Sequence[Message]) -> ta.Sequence[Post]:
        """Returns a single signed Post covering all given messages ([] when empty)."""
        if not messages:
            return []

        payload = AwsPutLogEventsRequest(
            log_group_name=self._log_group_name,
            log_stream_name=self._log_stream_name,
            log_events=[
                AwsLogEvent(
                    message=m.message,
                    timestamp=m.ts_ms,
                )
                for m in messages
            ],
        )

        # Compact json (no whitespace) - this exact byte sequence is what gets signed and sent.
        body = json.dumps(
            payload.to_aws(),
            indent=None,
            separators=(',', ':'),
        ).encode('utf-8')

        sig_req = V4AwsSigner.Request(
            method='POST',
            url=self._url,
            headers=self._headers,
            payload=body,
        )

        # sign_payload=False: the body hash is not folded into the signature.
        sig_headers = self._signer.sign(
            sig_req,
            sign_payload=False,
        )
        sig_req = dc.replace(sig_req, headers={**sig_req.headers, **sig_headers})

        # Collapse the single-element header value lists back to plain strings.
        post = AwsLogMessagePoster.Post(
            url=self._url,
            headers={k: check_single(v) for k, v in sig_req.headers.items()},
            data=sig_req.payload,
        )

        return [post]
1710
+
1711
+
1712
+ ########################################
1713
+ # ../../../../../omlish/lite/subprocesses.py
1714
+
1715
+
1716
+ ##
1717
+
1718
+
1719
# When True, all exec argvs are wrapped in `sh -c` (see subprocess_maybe_shell_wrap_exec).
_SUBPROCESS_SHELL_WRAP_EXECS = False
1720
+
1721
+
1722
def subprocess_shell_wrap_exec(*args: str) -> ta.Tuple[str, ...]:
    """Wraps an argv in an equivalent `sh -c '...'` invocation, shell-quoting each argument."""
    quoted = ' '.join(shlex.quote(a) for a in args)
    return ('sh', '-c', quoted)
1724
+
1725
+
1726
def subprocess_maybe_shell_wrap_exec(*args: str) -> ta.Tuple[str, ...]:
    """Shell-wraps the argv only when globally enabled or a debugger is attached."""
    if not (_SUBPROCESS_SHELL_WRAP_EXECS or is_debugger_attached()):
        return args
    return subprocess_shell_wrap_exec(*args)
1731
+
1732
+
1733
def _prepare_subprocess_invocation(
        *args: str,
        env: ta.Optional[ta.Mapping[str, ta.Any]] = None,
        extra_env: ta.Optional[ta.Mapping[str, ta.Any]] = None,
        quiet: bool = False,
        shell: bool = False,
        **kwargs: ta.Any,
) -> ta.Tuple[ta.Tuple[ta.Any, ...], ta.Dict[str, ta.Any]]:
    """Normalizes args/kwargs shared by the subprocess_* helpers.

    Merges extra_env over env (or os.environ), optionally silences stderr when not debugging,
    and shell-wraps the argv when appropriate.
    """
    log.debug(args)
    if extra_env:
        log.debug(extra_env)
        env = {**(env if env is not None else os.environ), **extra_env}

    # Only silence stderr when the caller didn't set it and debug logging is off.
    if quiet and 'stderr' not in kwargs and not log.isEnabledFor(logging.DEBUG):
        kwargs['stderr'] = subprocess.DEVNULL

    if not shell:
        args = subprocess_maybe_shell_wrap_exec(*args)

    return args, dict(
        env=env,
        shell=shell,
        **kwargs,
    )
1760
+
1761
+
1762
def subprocess_check_call(*args: str, stdout=sys.stderr, **kwargs: ta.Any) -> None:
    """subprocess.check_call with shared invocation prep; stdout defaults to stderr."""
    prepped_args, prepped_kwargs = _prepare_subprocess_invocation(*args, stdout=stdout, **kwargs)
    return subprocess.check_call(prepped_args, **prepped_kwargs)  # type: ignore
1765
+
1766
+
1767
def subprocess_check_output(*args: str, **kwargs: ta.Any) -> bytes:
    """subprocess.check_output with shared invocation prep."""
    prepped_args, prepped_kwargs = _prepare_subprocess_invocation(*args, **kwargs)
    return subprocess.check_output(prepped_args, **prepped_kwargs)
1770
+
1771
+
1772
def subprocess_check_output_str(*args: str, **kwargs: ta.Any) -> str:
    """Like subprocess_check_output but decodes and strips the output."""
    out = subprocess_check_output(*args, **kwargs)
    return out.decode().strip()
1774
+
1775
+
1776
+ ##
1777
+
1778
+
1779
# Exception types the subprocess_try_* helpers treat as an expected, non-fatal failure
# (missing executable, or nonzero exit status).
DEFAULT_SUBPROCESS_TRY_EXCEPTIONS: ta.Tuple[ta.Type[Exception], ...] = (
    FileNotFoundError,
    subprocess.CalledProcessError,
)
1783
+
1784
+
1785
def subprocess_try_call(
        *args: str,
        try_exceptions: ta.Tuple[ta.Type[Exception], ...] = DEFAULT_SUBPROCESS_TRY_EXCEPTIONS,
        **kwargs: ta.Any,
) -> bool:
    """Runs the command, returning True on success and False on an expected failure."""
    try:
        subprocess_check_call(*args, **kwargs)
    except try_exceptions as e:  # noqa
        if log.isEnabledFor(logging.DEBUG):
            log.exception('command failed')
        return False
    return True
1798
+
1799
+
1800
def subprocess_try_output(
        *args: str,
        try_exceptions: ta.Tuple[ta.Type[Exception], ...] = DEFAULT_SUBPROCESS_TRY_EXCEPTIONS,
        **kwargs: ta.Any,
) -> ta.Optional[bytes]:
    """Returns the command's output, or None if it fails with an expected exception."""
    try:
        out = subprocess_check_output(*args, **kwargs)
    except try_exceptions as e:  # noqa
        if log.isEnabledFor(logging.DEBUG):
            log.exception('command failed')
        return None
    return out
1811
+
1812
+
1813
def subprocess_try_output_str(*args: str, **kwargs: ta.Any) -> ta.Optional[str]:
    """Like subprocess_try_output but decodes and strips, propagating None on failure."""
    if (out := subprocess_try_output(*args, **kwargs)) is None:
        return None
    return out.decode().strip()
1816
+
1817
+
1818
+ ########################################
1819
+ # ../journald/tailer.py
1820
+
1821
+
1822
class JournalctlTailerWorker(ThreadWorker):
    """ThreadWorker that follows `journalctl -o json` and pushes parsed message batches onto an
    output queue."""

    DEFAULT_CMD: ta.ClassVar[ta.Sequence[str]] = ['journalctl']

    def __init__(
            self,
            output,  # type: queue.Queue[ta.Sequence[JournalctlMessage]]
            *,
            since: ta.Optional[str] = None,  # journalctl --since value
            after_cursor: ta.Optional[str] = None,  # journalctl --after-cursor value

            cmd: ta.Optional[ta.Sequence[str]] = None,  # base argv, overriding DEFAULT_CMD
            shell_wrap: bool = False,  # wrap the command in `sh -c` (used under debuggers)

            read_size: int = 0x4000,  # bytes per nonblocking pipe read
            sleep_s: float = 1.,  # pause between drain passes

            **kwargs: ta.Any,
    ) -> None:
        super().__init__(**kwargs)

        self._output = output

        self._since = since
        self._after_cursor = after_cursor

        self._cmd = cmd or self.DEFAULT_CMD
        self._shell_wrap = shell_wrap

        self._read_size = read_size
        self._sleep_s = sleep_s

        self._mb = JournalctlMessageBuilder()

        self._proc: ta.Optional[subprocess.Popen] = None

    @cached_nullary
    def _full_cmd(self) -> ta.Sequence[str]:
        """Builds the full journalctl argv: json output, cursor reporting, follow mode."""
        cmd = [
            *self._cmd,
            '--output', 'json',
            '--show-cursor',
            '--follow',
        ]

        if self._since is not None:
            cmd.extend(['--since', self._since])

        if self._after_cursor is not None:
            cmd.extend(['--after-cursor', self._after_cursor])

        if self._shell_wrap:
            cmd = list(subprocess_shell_wrap_exec(*cmd))

        return cmd

    def _run(self) -> None:
        """Thread body: spawn journalctl and pump its stdout until stopped or it exits."""
        with subprocess.Popen(
            self._full_cmd(),
            stdout=subprocess.PIPE,
        ) as self._proc:
            stdout = check_not_none(self._proc.stdout)

            # Make the pipe nonblocking so the loop can heartbeat and notice process exit even
            # while the journal is quiet.
            fd = stdout.fileno()
            fl = fcntl.fcntl(fd, fcntl.F_GETFL)
            fcntl.fcntl(fd, fcntl.F_SETFL, fl | os.O_NONBLOCK)

            while True:
                if not self._heartbeat():
                    break

                # Drain whatever is currently available on the pipe.
                while stdout.readable():
                    if not self._heartbeat():
                        break

                    buf = stdout.read(self._read_size)
                    if not buf:
                        log.debug('Journalctl empty read')
                        break

                    log.debug('Journalctl read buffer: %r', buf)
                    msgs = self._mb.feed(buf)
                    if msgs:
                        self._output.put(msgs)

                if self._proc.poll() is not None:
                    log.critical('Journalctl process terminated')
                    break

                log.debug('Journalctl readable')
                time.sleep(self._sleep_s)
1912
+
1913
+
1914
+ ########################################
1915
+ # main.py
1916
+
1917
+
1918
@dc.dataclass(frozen=True)
class JournalctlOpts:
    """Options mirroring journalctl's cursor/time-range cli flags.

    NOTE(review): not referenced elsewhere in this module - confirm it is used by other code.
    """

    after_cursor: ta.Optional[str] = None

    since: ta.Optional[str] = None
    until: ta.Optional[str] = None
1924
+
1925
+
1926
class JournalctlToAws:
    """Tails journalctl and forwards each message batch to AWS CloudWatch Logs.

    Supports an exclusive pidfile, and persists the last-seen journal cursor to a file so a
    restart resumes where the previous run left off. Use as a context manager; resources are
    held on an internal ExitStack.
    """

    @dc.dataclass(frozen=True)
    class Config:
        pid_file: ta.Optional[str] = None  # exclusive-lock pidfile path (~ is expanded)

        cursor_file: ta.Optional[str] = None  # where the last journal cursor is persisted

        #

        aws_log_group_name: str = 'omlish'
        aws_log_stream_name: ta.Optional[str] = None

        aws_access_key_id: ta.Optional[str] = None
        aws_secret_access_key: ta.Optional[str] = dc.field(default=None, repr=False)  # hidden from repr

        aws_region_name: str = 'us-west-1'

        #

        journalctl_cmd: ta.Optional[ta.Sequence[str]] = None  # override of the journalctl argv

        journalctl_after_cursor: ta.Optional[str] = None
        journalctl_since: ta.Optional[str] = None

        #

        dry_run: bool = False  # when True, build but do not send the http posts

    def __init__(self, config: Config) -> None:
        super().__init__()
        self._config = config

    #

    _es: contextlib.ExitStack

    def __enter__(self) -> 'JournalctlToAws':
        self._es = contextlib.ExitStack().__enter__()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        return self._es.__exit__(exc_type, exc_val, exc_tb)

    #

    @cached_nullary
    def _pidfile(self) -> ta.Optional[Pidfile]:
        """Opens, locks, and writes the configured pidfile (None when not configured)."""
        if self._config.pid_file is None:
            return None

        pfp = os.path.expanduser(self._config.pid_file)

        log.info('Opening pidfile %s', pfp)

        pf = self._es.enter_context(Pidfile(pfp))
        pf.write()
        return pf

    def _ensure_locked(self) -> None:
        # No-op when no pidfile is configured.
        if (pf := self._pidfile()) is not None:
            pf.ensure_locked()

    #

    def _read_cursor_file(self) -> ta.Optional[str]:
        """Returns the persisted journal cursor, or None if unconfigured or missing."""
        self._ensure_locked()

        if not (cf := self._config.cursor_file):
            return None
        cf = os.path.expanduser(cf)

        try:
            with open(cf) as f:
                return f.read().strip()
        except FileNotFoundError:
            return None

    def _write_cursor_file(self, cursor: str) -> None:
        """Persists the cursor atomically via write-to-temp-then-rename."""
        self._ensure_locked()

        if not (cf := self._config.cursor_file):
            return
        cf = os.path.expanduser(cf)

        log.info('Writing cursor file %s : %s', cf, cursor)
        with open(ncf := cf + '.next', 'w') as f:
            f.write(cursor)

        os.rename(ncf, cf)

    #

    @cached_nullary
    def _aws_credentials(self) -> AwsSigner.Credentials:
        # Both keys must be set (via config or env fallback in _main) by the time this runs.
        return AwsSigner.Credentials(
            access_key_id=check_non_empty_str(self._config.aws_access_key_id),
            secret_access_key=check_non_empty_str(self._config.aws_secret_access_key),
        )

    @cached_nullary
    def _aws_log_message_poster(self) -> AwsLogMessagePoster:
        return AwsLogMessagePoster(
            log_group_name=self._config.aws_log_group_name,
            log_stream_name=check_non_empty_str(self._config.aws_log_stream_name),
            region_name=self._config.aws_region_name,
            credentials=check_not_none(self._aws_credentials()),
        )

    #

    @cached_nullary
    def _journalctl_message_queue(self):  # type: () -> queue.Queue[ta.Sequence[JournalctlMessage]]
        return queue.Queue()

    @cached_nullary
    def _journalctl_tailer_worker(self) -> JournalctlTailerWorker:
        # Explicit config takes precedence over the persisted cursor file.
        ac: ta.Optional[str] = self._config.journalctl_after_cursor
        if ac is None:
            ac = self._read_cursor_file()
        if ac is not None:
            log.info('Starting from cursor %s', ac)

        if (since := self._config.journalctl_since):
            log.info('Starting since %s', since)

        return JournalctlTailerWorker(
            self._journalctl_message_queue(),

            since=since,
            after_cursor=ac,

            cmd=self._config.journalctl_cmd,
            shell_wrap=is_debugger_attached(),
        )

    #

    def run(self) -> None:
        """Main loop: drain the tailer queue, post each chunk to AWS, persist the cursor."""
        self._ensure_locked()

        q = self._journalctl_message_queue()
        jtw = self._journalctl_tailer_worker()
        mp = self._aws_log_message_poster()

        jtw.start()

        last_cursor: ta.Optional[str] = None  # noqa
        while True:
            if not jtw.is_alive():
                log.critical('Journalctl tailer worker died')
                break

            msgs: ta.Sequence[JournalctlMessage] = q.get()
            log.debug('%r', msgs)

            # Use the newest cursor present in the chunk.
            cur_cursor: ta.Optional[str] = None
            for m in reversed(msgs):
                if m.cursor is not None:
                    cur_cursor = m.cursor
                    break

            if not msgs:
                log.warning('Empty queue chunk')
                continue

            # NOTE(review): the event timestamp is the send time, not the message's own
            # _SOURCE_REALTIME_TIMESTAMP (m.ts_us) - confirm this is intended.
            [post] = mp.feed([mp.Message(
                message=json.dumps(m.dct),
                ts_ms=int(time.time() * 1000.),
            ) for m in msgs])
            log.debug('%r', post)

            if not self._config.dry_run:
                with urllib.request.urlopen(urllib.request.Request(  # noqa
                        post.url,
                        method='POST',
                        headers=dict(post.headers),
                        data=post.data,
                )) as resp:
                    response = AwsPutLogEventsResponse.from_aws(json.loads(resp.read().decode('utf-8')))
                    log.debug('%r', response)

            # Persist progress only after the (possibly dry-run) post completed.
            if cur_cursor is not None:
                self._write_cursor_file(cur_cursor)
                last_cursor = cur_cursor  # noqa
2110
+
2111
+
2112
def _main() -> None:
    """CLI entry point: parses args, loads/augments config, and runs the forwarder."""
    parser = argparse.ArgumentParser()

    parser.add_argument('--config-file')
    parser.add_argument('-v', '--verbose', action='store_true')

    parser.add_argument('--after-cursor', nargs='?')
    parser.add_argument('--since', nargs='?')
    parser.add_argument('--dry-run', action='store_true')

    parser.add_argument('--message', nargs='?')
    parser.add_argument('--real', action='store_true')

    args = parser.parse_args()

    #

    configure_standard_logging('DEBUG' if args.verbose else 'INFO')

    #

    config: JournalctlToAws.Config
    if args.config_file:
        with open(os.path.expanduser(args.config_file)) as cf:
            config_dct = json.load(cf)
        config = unmarshal_obj(config_dct, JournalctlToAws.Config)
    else:
        config = JournalctlToAws.Config()

    #

    # Fall back to the standard AWS environment variables when the config leaves credentials
    # unset.
    for k in ['aws_access_key_id', 'aws_secret_access_key']:
        if not getattr(config, k) and k.upper() in os.environ:
            config = dc.replace(config, **{k: os.environ.get(k.upper())})  # type: ignore

    #

    # Without --real, tail a synthetic message generator instead of the actual journal.
    if not args.real:
        config = dc.replace(config, journalctl_cmd=[
            sys.executable,
            os.path.join(os.path.dirname(__file__), 'journald', 'genmessages.py'),
            '--sleep-n', '2',
            '--sleep-s', '.5',
            *(['--message', args.message] if args.message else []),
            '100000',
        ])

    #

    # Bug fix: the cli arg names 'after_cursor' and 'since' must be mapped onto the config
    # fields 'journalctl_after_cursor' and 'journalctl_since'. dataclasses.replace raises
    # TypeError on unknown field names, so the previous direct use of the arg name crashed
    # whenever --after-cursor or --since was passed.
    for arg_name, config_field in [
        ('after_cursor', 'journalctl_after_cursor'),
        ('since', 'journalctl_since'),
        ('dry_run', 'dry_run'),
    ]:
        if (pa := getattr(args, arg_name)):
            config = dc.replace(config, **{config_field: pa})

    #

    with JournalctlToAws(config) as jta:
        jta.run()
2169
+
2170
+
2171
# Script entry point; the module stays importable without side effects.
if __name__ == '__main__':
    _main()