ominfra 0.0.0.dev87__py3-none-any.whl → 0.0.0.dev89__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,2166 @@
1
+ #!/usr/bin/env python3
2
+ # noinspection DuplicatedCode
3
+ # @omlish-lite
4
+ # @omlish-script
5
+ # @omlish-amalg-output ../clouds/aws/journald2aws/main.py
6
+ # ruff: noqa: N802 UP006 UP007 UP036
7
+ """
8
+ https://www.freedesktop.org/software/systemd/man/latest/journalctl.html
9
+
10
+ journalctl:
11
+ -o json
12
+ --show-cursor
13
+
14
+ --since "2012-10-30 18:17:16"
15
+ --until "2012-10-30 18:17:16"
16
+
17
+ --after-cursor <cursor>
18
+
19
+ ==
20
+
21
+ https://www.freedesktop.org/software/systemd/man/latest/systemd.journal-fields.html
22
+
23
+ ==
24
+
25
+ @dc.dataclass(frozen=True)
26
+ class Journald2AwsConfig:
27
+ log_group_name: str
28
+ log_stream_name: str
29
+
30
+ aws_batch_size: int = 1_000
31
+ aws_flush_interval_s: float = 1.
32
+ """
33
+ import abc
34
+ import argparse
35
+ import base64
36
+ import collections.abc
37
+ import contextlib
38
+ import dataclasses as dc
39
+ import datetime
40
+ import decimal
41
+ import enum
42
+ import fcntl
43
+ import fractions
44
+ import functools
45
+ import hashlib
46
+ import hmac
47
+ import inspect
48
+ import io
49
+ import json
50
+ import logging
51
+ import os
52
+ import os.path
53
+ import queue
54
+ import shlex
55
+ import signal
56
+ import subprocess
57
+ import sys
58
+ import threading
59
+ import time
60
+ import typing as ta
61
+ import urllib.parse
62
+ import urllib.request
63
+ import uuid
64
+ import weakref # noqa
65
+
66
+
67
+ ########################################
68
+
69
+
70
# Fail fast on interpreters older than the minimum this amalgamated script targets.
if sys.version_info < (3, 8):
    raise OSError(
        f'Requires python (3, 8), got {sys.version_info} from {sys.executable}')  # noqa
73
+
74
+
75
+ ########################################
76
+
77
+
78
+ # ../../../../../omlish/lite/check.py
79
+ T = ta.TypeVar('T')
80
+
81
+
82
+ ########################################
83
+ # ../../../../../omlish/lite/cached.py
84
+
85
+
86
class cached_nullary:  # noqa
    """Caches the result of a zero-argument callable after its first invocation.

    Also works as a method decorator: attribute access (the descriptor
    protocol) installs a fresh per-instance cache bound to that instance.
    """

    def __init__(self, fn):
        super().__init__()
        self._fn = fn
        self._missing = object()
        self._value = self._missing
        functools.update_wrapper(self, fn)

    def __call__(self, *args, **kwargs):  # noqa
        value = self._value
        if value is self._missing:
            value = self._value = self._fn()
        return value

    def __get__(self, instance, owner):  # noqa
        rebound = self.__class__(self._fn.__get__(instance, owner))
        instance.__dict__[self._fn.__name__] = rebound
        return rebound
101
+
102
+
103
+ ########################################
104
+ # ../../../../../omlish/lite/check.py
105
+
106
+
107
def check_isinstance(v: T, spec: ta.Union[ta.Type[T], tuple]) -> T:
    """Returns `v` unchanged, raising TypeError unless it is an instance of `spec`."""
    if isinstance(v, spec):
        return v
    raise TypeError(v)
111
+
112
+
113
def check_not_isinstance(v: T, spec: ta.Union[type, tuple]) -> T:
    """Returns `v` unchanged, raising TypeError if it IS an instance of `spec`."""
    if not isinstance(v, spec):
        return v
    raise TypeError(v)
117
+
118
+
119
def check_not_none(v: ta.Optional[T]) -> T:
    """Returns `v`, raising ValueError if it is None."""
    if v is not None:
        return v
    raise ValueError
123
+
124
+
125
def check_not(v: ta.Any) -> ta.Any:
    """Checks that `v` is falsy and returns it unchanged.

    Fix: the return type was previously annotated `-> None`, but any falsy
    value (0, '', [], None, ...) is passed through for chaining - the
    annotation now reflects the actual behavior. Runtime behavior unchanged.

    Raises:
        ValueError: if `v` is truthy.
    """
    if v:
        raise ValueError(v)
    return v
129
+
130
+
131
def check_non_empty_str(v: ta.Optional[str]) -> str:
    """Returns `v`, raising ValueError if it is None or empty."""
    if v:
        return v
    raise ValueError
135
+
136
+
137
def check_state(v: bool, msg: str = 'Illegal state') -> None:
    """Raises ValueError(msg) unless `v` is truthy."""
    if v:
        return
    raise ValueError(msg)
140
+
141
+
142
def check_equal(l: T, r: T) -> T:
    """Returns `l`; raises ValueError(l, r) if the two values compare unequal."""
    if l != r:
        raise ValueError(l, r)
    return l
146
+
147
+
148
def check_not_equal(l: T, r: T) -> T:
    """Returns `l`; raises ValueError(l, r) if the two values compare equal."""
    if l == r:
        raise ValueError(l, r)
    return l
152
+
153
+
154
def check_single(vs: ta.Iterable[T]) -> T:
    """Returns the sole element of `vs`; unpacking raises ValueError when there is not exactly one."""
    v, = vs
    return v
157
+
158
+
159
+ ########################################
160
+ # ../../../../../omlish/lite/json.py
161
+
162
+
163
+ ##
164
+
165
+
166
JSON_PRETTY_INDENT = 2

JSON_PRETTY_KWARGS: ta.Mapping[str, ta.Any] = dict(
    indent=JSON_PRETTY_INDENT,
)

# NOTE: json.dump writes to a file object and returns None - annotated accordingly
# (was previously annotated as returning bytes, which required a type: ignore).
json_dump_pretty: ta.Callable[..., None] = functools.partial(json.dump, **JSON_PRETTY_KWARGS)
json_dumps_pretty: ta.Callable[..., str] = functools.partial(json.dumps, **JSON_PRETTY_KWARGS)


##


JSON_COMPACT_SEPARATORS = (',', ':')

JSON_COMPACT_KWARGS: ta.Mapping[str, ta.Any] = dict(
    indent=None,
    separators=JSON_COMPACT_SEPARATORS,
)

json_dump_compact: ta.Callable[..., None] = functools.partial(json.dump, **JSON_COMPACT_KWARGS)
json_dumps_compact: ta.Callable[..., str] = functools.partial(json.dumps, **JSON_COMPACT_KWARGS)
188
+
189
+
190
+ ########################################
191
+ # ../../../../../omlish/lite/pidfile.py
192
+
193
+
194
class Pidfile:
    """A pidfile guarded by an exclusive flock: the lock marks liveness, the contents hold the owner's pid."""

    def __init__(self, path: str) -> None:
        super().__init__()
        self._path = path

    # Set in __enter__, deleted in __exit__ (annotation-only declaration).
    _f: ta.TextIO

    def __repr__(self) -> str:
        return f'{self.__class__.__name__}({self._path!r})'

    def __enter__(self) -> 'Pidfile':
        fd = os.open(self._path, os.O_RDWR | os.O_CREAT, 0o600)
        try:
            # Keep the fd (and thus the flock) alive across exec in children.
            os.set_inheritable(fd, True)
            f = os.fdopen(fd, 'r+')
        except Exception:
            try:
                os.close(fd)
            except Exception:  # noqa
                pass
            raise
        self._f = f
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if self._f is not None:
            self._f.close()
        del self._f

    def try_lock(self) -> bool:
        # Non-blocking exclusive flock; False means another process holds it.
        try:
            fcntl.flock(self._f, fcntl.LOCK_EX | fcntl.LOCK_NB)
            return True
        except OSError:
            return False

    def ensure_locked(self) -> None:
        if not self.try_lock():
            raise RuntimeError('Could not get lock')

    def write(self, pid: ta.Optional[int] = None) -> None:
        self.ensure_locked()
        if pid is None:
            pid = os.getpid()
        self._f.write(f'{pid}\n')
        self._f.flush()

    def clear(self) -> None:
        self.ensure_locked()
        self._f.seek(0)
        self._f.truncate()

    def read(self) -> int:
        # Only meaningful when some *other* live process holds the lock
        # (i.e. wrote its pid); succeeding at try_lock means nobody owns it.
        if self.try_lock():
            raise RuntimeError('Got lock')
        self._f.seek(0)
        return int(self._f.read())

    def kill(self, sig: int = signal.SIGTERM) -> None:
        pid = self.read()
        os.kill(pid, sig)  # FIXME: Still racy
255
+
256
+
257
+ ########################################
258
+ # ../../../../../omlish/lite/reflect.py
259
+
260
+
261
# Private typing internals used to recognize generic aliases;
# _SpecialGenericAlias only exists on newer pythons, hence the hasattr guard.
_GENERIC_ALIAS_TYPES = (
    ta._GenericAlias,  # type: ignore  # noqa
    *([ta._SpecialGenericAlias] if hasattr(ta, '_SpecialGenericAlias') else []),  # noqa
)
265
+
266
+
267
def is_generic_alias(obj, *, origin: ta.Any = None) -> bool:
    """True if `obj` is a typing generic alias, optionally restricted to a given origin."""
    if not isinstance(obj, _GENERIC_ALIAS_TYPES):
        return False
    return origin is None or ta.get_origin(obj) is origin
272
+
273
+
274
# Specializations recognizing ta.Union[...] / ta.Callable[...] aliases.
is_union_alias = functools.partial(is_generic_alias, origin=ta.Union)
is_callable_alias = functools.partial(is_generic_alias, origin=ta.Callable)
276
+
277
+
278
def is_optional_alias(spec: ta.Any) -> bool:
    """True for two-arm unions with a None arm, i.e. ta.Optional[X]."""
    if not isinstance(spec, _GENERIC_ALIAS_TYPES):  # noqa
        return False
    if ta.get_origin(spec) is not ta.Union:
        return False
    args = ta.get_args(spec)
    return len(args) == 2 and any(a in (None, type(None)) for a in args)
285
+
286
+
287
def get_optional_alias_arg(spec: ta.Any) -> ta.Any:
    """Returns the X from ta.Optional[X]; unpacking raises ValueError unless exactly one non-None arm exists."""
    non_none = [a for a in ta.get_args(spec) if a not in (None, type(None))]
    [it] = non_none
    return it
290
+
291
+
292
def deep_subclasses(cls: ta.Type[T]) -> ta.Iterator[ta.Type[T]]:
    """Yields every transitive subclass of `cls` in depth-first preorder, without repeats."""
    seen: ta.Set[type] = set()

    def visit(c: type) -> ta.Iterator[ta.Type[T]]:
        for sub in c.__subclasses__():
            if sub not in seen:
                seen.add(sub)
                yield sub  # type: ignore
                yield from visit(sub)

    yield from visit(cls)
302
+
303
+
304
+ ########################################
305
+ # ../../../../../omlish/lite/strings.py
306
+
307
+
308
def camel_case(name: str, lower: bool = False) -> str:
    """Converts snake_case `name` to CamelCase (or camelCase when `lower` is true)."""
    if not name:
        return ''
    out = ''.join(part.capitalize() for part in name.split('_'))  # noqa
    if lower:
        out = out[0].lower() + out[1:]
    return out
315
+
316
+
317
def snake_case(name: str) -> str:
    """Converts CamelCase/mixedCase `name` to snake_case by cutting before each uppercase letter."""
    cuts = [i for i, ch in enumerate(name) if ch.isupper()]
    starts = [None, *cuts]
    stops = [*cuts, None]
    pieces = [name[a:b].lower() for a, b in zip(starts, stops)]
    return '_'.join(pieces).strip('_')
320
+
321
+
322
def is_dunder(name: str) -> bool:
    """True for names of the form __x__ (double leading/trailing underscore, enum-style rules)."""
    if len(name) <= 4:
        return False
    if name[:2] != '__' or name[-2:] != '__':
        return False
    return name[2:3] != '_' and name[-3:-2] != '_'
329
+
330
+
331
def is_sunder(name: str) -> bool:
    """True for names of the form _x_ (single leading/trailing underscore, enum-style rules).

    Note: like the original, indexing name[0] raises IndexError on ''.
    """
    if name[0] != '_' or name[-1] != '_':
        return False
    return name[1:2] != '_' and name[-2:-1] != '_' and len(name) > 2
338
+
339
+
340
def attr_repr(obj: ta.Any, *attrs: str) -> str:
    """Builds a repr like `TypeName(a=1, b='x')` from the named attributes of `obj`."""
    parts = [f'{attr}={getattr(obj, attr)!r}' for attr in attrs]
    return f'{type(obj).__name__}({", ".join(parts)})'
342
+
343
+
344
+ ########################################
345
+ # ../../auth.py
346
+ """
347
+ https://docs.aws.amazon.com/IAM/latest/UserGuide/create-signed-request.html
348
+
349
+ TODO:
350
+ - https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-streaming.html
351
+ - boto / s3transfer upload_fileobj doesn't stream either lol - eagerly calcs Content-MD5
352
+ - sts tokens
353
+ - !! fix canonical_qs - sort params
354
+ - secrets
355
+ """
356
+
357
+
358
+ ##
359
+
360
+
361
class AwsSigner:
    """Base for AWS request signers: credentials/region/service plus shared HTTP and hashing helpers."""

    def __init__(
            self,
            creds: 'AwsSigner.Credentials',
            region_name: str,
            service_name: str,
    ) -> None:
        super().__init__()
        self._creds = creds
        self._region_name = region_name
        self._service_name = service_name

    #

    @dc.dataclass(frozen=True)
    class Credentials:
        access_key_id: str
        secret_access_key: str = dc.field(repr=False)  # repr=False keeps the secret out of logs/tracebacks

    @dc.dataclass(frozen=True)
    class Request:
        method: str
        url: str
        headers: ta.Mapping[str, ta.Sequence[str]] = dc.field(default_factory=dict)
        payload: bytes = b''

    #

    # Timestamp format used for the x-amz-date header.
    ISO8601 = '%Y%m%dT%H%M%SZ'

    #

    @staticmethod
    def _host_from_url(url: str) -> str:
        # Hostname with the port appended only when non-default for the scheme.
        url_parts = urllib.parse.urlsplit(url)
        host = check_non_empty_str(url_parts.hostname)
        default_ports = {
            'http': 80,
            'https': 443,
        }
        if url_parts.port is not None:
            if url_parts.port != default_ports.get(url_parts.scheme):
                host = '%s:%d' % (host, url_parts.port)
        return host

    @staticmethod
    def _lower_case_http_map(d: ta.Mapping[str, ta.Sequence[str]]) -> ta.Mapping[str, ta.Sequence[str]]:
        # Lower-cases header names, merging values whose names collide case-insensitively.
        # Rejects a bare str value (must be a sequence of strings).
        o: ta.Dict[str, ta.List[str]] = {}
        for k, vs in d.items():
            o.setdefault(k.lower(), []).extend(check_not_isinstance(vs, str))
        return o

    #

    @staticmethod
    def _as_bytes(data: ta.Union[str, bytes]) -> bytes:
        return data if isinstance(data, bytes) else data.encode('utf-8')

    @staticmethod
    def _sha256(data: ta.Union[str, bytes]) -> str:
        # Hex digest.
        return hashlib.sha256(AwsSigner._as_bytes(data)).hexdigest()

    @staticmethod
    def _sha256_sign(key: bytes, msg: ta.Union[str, bytes]) -> bytes:
        # Raw HMAC-SHA256 digest (for key-derivation chaining).
        return hmac.new(key, AwsSigner._as_bytes(msg), hashlib.sha256).digest()

    @staticmethod
    def _sha256_sign_hex(key: bytes, msg: ta.Union[str, bytes]) -> str:
        return hmac.new(key, AwsSigner._as_bytes(msg), hashlib.sha256).hexdigest()

    # Hex sha256 of an empty payload; assigned right after the class body below.
    _EMPTY_SHA256: str

    #

    # Headers excluded from signing.
    _SIGNED_HEADERS_BLACKLIST = frozenset([
        'authorization',
        'expect',
        'user-agent',
        'x-amzn-trace-id',
    ])

    def _validate_request(self, req: Request) -> None:
        # Method must be present and already upper-case; header names/values
        # must carry no surrounding whitespace.
        check_non_empty_str(req.method)
        check_equal(req.method.upper(), req.method)
        for k, vs in req.headers.items():
            check_equal(k.strip(), k)
            for v in vs:
                check_equal(v.strip(), v)


AwsSigner._EMPTY_SHA256 = AwsSigner._sha256(b'')  # noqa
452
+
453
+
454
+ ##
455
+
456
+
457
class V4AwsSigner(AwsSigner):
    """AWS Signature Version 4 signer.

    https://docs.aws.amazon.com/IAM/latest/UserGuide/create-signed-request.html
    """

    def sign(
            self,
            req: AwsSigner.Request,
            *,
            sign_payload: bool = False,
            utcnow: ta.Optional[datetime.datetime] = None,
    ) -> ta.Mapping[str, ta.Sequence[str]]:
        """Computes SigV4 headers for `req`: Authorization, X-Amz-Date, and (if `sign_payload`) X-Amz-Content-SHA256.

        `utcnow` may be injected for deterministic testing.
        """
        self._validate_request(req)

        #

        if utcnow is None:
            utcnow = datetime.datetime.now(tz=datetime.timezone.utc)  # noqa
        req_dt = utcnow.strftime(self.ISO8601)

        # Canonical URI and query string. NOTE(review): query params are not
        # sorted here - see the '!! fix canonical_qs - sort params' TODO in the
        # auth module docstring; multi-param query strings may sign incorrectly.

        parsed_url = urllib.parse.urlsplit(req.url)
        canon_uri = parsed_url.path
        canon_qs = parsed_url.query

        # Canonical headers: lower-cased, minus blacklisted names, plus host
        # and x-amz-date (and the payload hash when signing the payload).

        headers_to_sign: ta.Dict[str, ta.List[str]] = {
            k: list(v)
            for k, v in self._lower_case_http_map(req.headers).items()
            if k not in self._SIGNED_HEADERS_BLACKLIST
        }

        if 'host' not in headers_to_sign:
            headers_to_sign['host'] = [self._host_from_url(req.url)]

        headers_to_sign['x-amz-date'] = [req_dt]

        hashed_payload = self._sha256(req.payload) if req.payload else self._EMPTY_SHA256
        if sign_payload:
            headers_to_sign['x-amz-content-sha256'] = [hashed_payload]

        sorted_header_names = sorted(headers_to_sign)
        canon_headers = ''.join([
            ':'.join((k, ','.join(headers_to_sign[k]))) + '\n'
            for k in sorted_header_names
        ])
        signed_headers = ';'.join(sorted_header_names)

        # Canonical request.

        canon_req = '\n'.join([
            req.method,
            canon_uri,
            canon_qs,
            canon_headers,
            signed_headers,
            hashed_payload,
        ])

        # String to sign: algorithm, timestamp, credential scope, hashed canonical request.

        algorithm = 'AWS4-HMAC-SHA256'
        scope_parts = [
            req_dt[:8],
            self._region_name,
            self._service_name,
            'aws4_request',
        ]
        scope = '/'.join(scope_parts)
        hashed_canon_req = self._sha256(canon_req)
        string_to_sign = '\n'.join([
            algorithm,
            req_dt,
            scope,
            hashed_canon_req,
        ])

        # Derive the signing key via chained HMACs (date -> region -> service -> aws4_request), then sign.

        key = self._creds.secret_access_key
        key_date = self._sha256_sign(f'AWS4{key}'.encode('utf-8'), req_dt[:8])  # noqa
        key_region = self._sha256_sign(key_date, self._region_name)
        key_service = self._sha256_sign(key_region, self._service_name)
        key_signing = self._sha256_sign(key_service, 'aws4_request')
        sig = self._sha256_sign_hex(key_signing, string_to_sign)

        # Assemble the Authorization header.

        cred_scope = '/'.join([
            self._creds.access_key_id,
            *scope_parts,
        ])
        auth = f'{algorithm} ' + ', '.join([
            f'Credential={cred_scope}',
            f'SignedHeaders={signed_headers}',
            f'Signature={sig}',
        ])

        #

        out = {
            'Authorization': [auth],
            'X-Amz-Date': [req_dt],
        }
        if sign_payload:
            out['X-Amz-Content-SHA256'] = [hashed_payload]
        return out
562
+
563
+
564
+ ########################################
565
+ # ../../dataclasses.py
566
+
567
+
568
class AwsDataclass:
    """Base for dataclasses mirroring AWS API shapes: converts between snake_case attributes and camelCase dicts."""

    class Raw(dict):
        # Catch-all field type: a Raw-typed field receives the original AWS dict in from_aws.
        pass

    #

    # Per-class conversion metadata, built lazily on first use.
    _aws_meta: ta.ClassVar[ta.Optional['AwsDataclassMeta']] = None

    @classmethod
    def _get_aws_meta(cls) -> 'AwsDataclassMeta':
        # Checked via cls.__dict__ so each subclass gets its own meta rather
        # than inheriting a parent's.
        try:
            return cls.__dict__['_aws_meta']
        except KeyError:
            pass
        ret = cls._aws_meta = AwsDataclassMeta(cls)
        return ret

    #

    def to_aws(self) -> ta.Mapping[str, ta.Any]:
        return self._get_aws_meta().converters().d2a(self)

    @classmethod
    def from_aws(cls, v: ta.Mapping[str, ta.Any]) -> 'AwsDataclass':
        return cls._get_aws_meta().converters().a2d(v)
593
+
594
+
595
@dc.dataclass(frozen=True)
class AwsDataclassMeta:
    """Lazily-computed field metadata and dict<->dataclass converters for one AwsDataclass subclass."""

    cls: ta.Type['AwsDataclass']

    #

    class Field(ta.NamedTuple):
        d_name: str  # dataclass (snake_case) attribute name
        a_name: str  # AWS (camelCase) key name
        is_opt: bool  # was ta.Optional[...]
        is_seq: bool  # was a Sequence[...]
        dc_cls: ta.Optional[ta.Type['AwsDataclass']]  # nested AwsDataclass element type, if any

    @cached_nullary
    def fields(self) -> ta.Sequence[Field]:
        fs = []
        for f in dc.fields(self.cls):  # type: ignore  # noqa
            d_name = f.name
            a_name = camel_case(d_name, lower=True)

            is_opt = False
            is_seq = False
            dc_cls = None

            c = f.type
            # Raw fields are filled separately in a2d, never converted per-field.
            if c is AwsDataclass.Raw:
                continue

            # Unwrap Optional[X] -> X.
            if is_optional_alias(c):
                is_opt = True
                c = get_optional_alias_arg(c)

            # Unwrap Sequence[X] -> X.
            if is_generic_alias(c) and ta.get_origin(c) is collections.abc.Sequence:
                is_seq = True
                [c] = ta.get_args(c)

            # Any other generic alias is unsupported.
            if is_generic_alias(c):
                raise TypeError(c)

            if isinstance(c, type) and issubclass(c, AwsDataclass):
                dc_cls = c

            fs.append(AwsDataclassMeta.Field(
                d_name=d_name,
                a_name=a_name,
                is_opt=is_opt,
                is_seq=is_seq,
                dc_cls=dc_cls,
            ))

        return fs

    #

    class Converters(ta.NamedTuple):
        d2a: ta.Callable  # dataclass instance -> AWS-style dict
        a2d: ta.Callable  # AWS-style dict -> dataclass instance

    @cached_nullary
    def converters(self) -> Converters:
        # Locate the (first) Raw-typed field, if any - it receives the original dict.
        for df in dc.fields(self.cls):  # type: ignore  # noqa
            c = df.type

            if is_optional_alias(c):
                c = get_optional_alias_arg(c)

            if c is AwsDataclass.Raw:
                rf = df.name
                break

        else:
            rf = None

        # Pair each field with the converters of its nested dataclass type, if any.
        fs = [
            (f, f.dc_cls._get_aws_meta().converters() if f.dc_cls is not None else None)  # noqa
            for f in self.fields()
        ]

        def d2a(o):
            # None-valued fields are omitted from the output dict.
            dct = {}
            for f, cs in fs:
                x = getattr(o, f.d_name)
                if x is None:
                    continue
                if cs is not None:
                    if f.is_seq:
                        x = list(map(cs.d2a, x))
                    else:
                        x = cs.d2a(x)
                dct[f.a_name] = x
            return dct

        def a2d(v):
            # Unknown/absent keys are skipped; the Raw field (if any) gets the whole input.
            dct = {}
            for f, cs in fs:
                x = v.get(f.a_name)
                if x is None:
                    continue
                if cs is not None:
                    if f.is_seq:
                        x = list(map(cs.a2d, x))
                    else:
                        x = cs.a2d(x)
                dct[f.d_name] = x
            if rf is not None:
                dct[rf] = self.cls.Raw(v)
            return self.cls(**dct)

        return AwsDataclassMeta.Converters(d2a, a2d)
704
+
705
+
706
+ ########################################
707
+ # ../../../../../omlish/lite/io.py
708
+
709
+
710
class DelimitingBuffer:
    """
    Splits an incrementally-fed byte stream into delimiter-terminated chunks.

    https://github.com/python-trio/trio/issues/796 :|

    feed() yields complete chunks as bytes; oversized runs and the tail at
    close are yielded wrapped in Incomplete. Feeding empty data closes the
    buffer.
    """

    #

    class Error(Exception):
        def __init__(self, buffer: 'DelimitingBuffer') -> None:
            super().__init__(buffer)
            self.buffer = buffer

        def __repr__(self) -> str:
            return attr_repr(self, 'buffer')

    class ClosedError(Error):
        # Raised when feed/tell/peek is called after the buffer was closed.
        pass

    #

    DEFAULT_DELIMITERS: bytes = b'\n'

    def __init__(
            self,
            delimiters: ta.Iterable[int] = DEFAULT_DELIMITERS,
            *,
            keep_ends: bool = False,  # include the delimiter byte in yielded chunks
            max_size: ta.Optional[int] = None,  # cap on buffered bytes before forcing an Incomplete
    ) -> None:
        super().__init__()

        # Delimiters are individual byte values (ints); iterating bytes yields ints.
        self._delimiters = frozenset(check_isinstance(d, int) for d in delimiters)
        self._keep_ends = keep_ends
        self._max_size = max_size

        # Becomes None once the buffer is closed.
        self._buf: ta.Optional[io.BytesIO] = io.BytesIO()

    #

    @property
    def is_closed(self) -> bool:
        return self._buf is None

    def tell(self) -> int:
        if (buf := self._buf) is None:
            raise self.ClosedError(self)
        return buf.tell()

    def peek(self) -> bytes:
        if (buf := self._buf) is None:
            raise self.ClosedError(self)
        return buf.getvalue()

    def _find_delim(self, data: ta.Union[bytes, bytearray], i: int) -> ta.Optional[int]:
        # Index of the earliest delimiter at or after position i, or None.
        r = None  # type: int | None
        for d in self._delimiters:
            if (p := data.find(d, i)) >= 0:
                if r is None or p < r:
                    r = p
        return r

    def _append_and_reset(self, chunk: bytes) -> bytes:
        # Joins chunk onto any pending buffered bytes and empties the buffer.
        buf = check_not_none(self._buf)
        if not buf.tell():
            return chunk

        buf.write(chunk)
        ret = buf.getvalue()
        buf.seek(0)
        buf.truncate()
        return ret

    class Incomplete(ta.NamedTuple):
        # A partial chunk: either overflow past max_size or the tail at close.
        b: bytes

    def feed(self, data: ta.Union[bytes, bytearray]) -> ta.Generator[ta.Union[bytes, Incomplete], None, None]:
        """Feeds bytes in; yields complete chunks and Incomplete partials. Empty `data` closes the buffer."""
        if (buf := self._buf) is None:
            raise self.ClosedError(self)

        if not data:
            # EOF: close and flush anything pending as an Incomplete.
            self._buf = None

            if buf.tell():
                yield self.Incomplete(buf.getvalue())

            return

        l = len(data)
        i = 0
        while i < l:
            if (p := self._find_delim(data, i)) is None:
                break

            n = p + 1
            if self._keep_ends:
                p = n

            yield self._append_and_reset(data[i:p])

            i = n

        if i >= l:
            return

        # No more delimiters: buffer the remainder...
        if self._max_size is None:
            buf.write(data[i:])
            return

        # ...splitting into Incomplete chunks whenever max_size would be exceeded.
        while i < l:
            remaining_data_len = l - i
            remaining_buf_capacity = self._max_size - buf.tell()

            if remaining_data_len < remaining_buf_capacity:
                buf.write(data[i:])
                return

            p = i + remaining_buf_capacity
            yield self.Incomplete(self._append_and_reset(data[i:p]))
            i = p
829
+
830
+
831
+ ########################################
832
+ # ../../../../../omlish/lite/logs.py
833
+ """
834
+ TODO:
835
+ - translate json keys
836
+ - debug
837
+ """
838
+
839
+
840
# Module-level logger for this script.
log = logging.getLogger(__name__)
841
+
842
+
843
+ ##
844
+
845
+
846
class TidLogFilter(logging.Filter):
    """Filter that stamps each record with the OS-level thread id as `record.tid` (always passes the record)."""

    def filter(self, record):
        record.tid = threading.get_native_id()
        return True
851
+
852
+
853
+ ##
854
+
855
+
856
class JsonLogFormatter(logging.Formatter):
    """Formats records as compact JSON objects.

    KEYS maps LogRecord attribute name -> 'omit when None' flag: attributes
    flagged True are dropped from the output when their value is None.
    """

    KEYS: ta.Mapping[str, bool] = {
        'name': False,
        'msg': False,
        'args': False,
        'levelname': False,
        'levelno': False,
        'pathname': False,
        'filename': False,
        'module': False,
        'exc_info': True,
        'exc_text': True,
        'stack_info': True,
        'lineno': False,
        'funcName': False,
        'created': False,
        'msecs': False,
        'relativeCreated': False,
        'thread': False,
        'threadName': False,
        'processName': False,
        'process': False,
    }

    def format(self, record: logging.LogRecord) -> str:
        dct = {}
        for key, omit_if_none in self.KEYS.items():
            value = getattr(record, key)
            if omit_if_none and value is None:
                continue
            dct[key] = value
        return json_dumps_compact(dct)
889
+
890
+
891
+ ##
892
+
893
+
894
# (field-name, %-style fragment) pairs joined by StandardLogFormatter.build_log_format.
STANDARD_LOG_FORMAT_PARTS = [
    ('asctime', '%(asctime)-15s'),
    ('process', 'pid=%(process)-6s'),
    ('thread', 'tid=%(thread)x'),
    ('levelname', '%(levelname)s'),
    ('name', '%(name)s'),
    ('separator', '::'),
    ('message', '%(message)s'),
]
903
+
904
+
905
class StandardLogFormatter(logging.Formatter):
    """Plain-text formatter whose asctime is a local-time datetime carrying milliseconds."""

    @staticmethod
    def build_log_format(parts: ta.Iterable[ta.Tuple[str, str]]) -> str:
        """Joins the format fragments of (name, fragment) pairs with single spaces."""
        return ' '.join(fragment for _, fragment in parts)

    # Render record.created as a local datetime object.
    converter = datetime.datetime.fromtimestamp  # type: ignore

    def formatTime(self, record, datefmt=None):
        dt = self.converter(record.created)  # type: ignore
        if datefmt:
            return dt.strftime(datefmt)  # noqa
        base = dt.strftime('%Y-%m-%d %H:%M:%S')  # noqa
        return '%s.%03d' % (base, record.msecs)
920
+
921
+
922
+ ##
923
+
924
+
925
class ProxyLogFilterer(logging.Filterer):
    """A logging.Filterer that forwards all filter state and operations to a wrapped Filterer."""

    def __init__(self, underlying: logging.Filterer) -> None:  # noqa
        # Deliberately does not call logging.Filterer.__init__ - all state
        # lives on the underlying object.
        self._underlying = underlying

    @property
    def underlying(self) -> logging.Filterer:
        return self._underlying

    @property
    def filters(self):
        return self._underlying.filters

    @filters.setter
    def filters(self, filters):
        self._underlying.filters = filters

    def addFilter(self, filter):  # noqa
        self._underlying.addFilter(filter)

    def removeFilter(self, filter):  # noqa
        self._underlying.removeFilter(filter)

    def filter(self, record):
        return self._underlying.filter(record)
949
+
950
+
951
class ProxyLogHandler(ProxyLogFilterer, logging.Handler):
    """A logging.Handler that forwards every handler operation to a wrapped Handler.

    Only ProxyLogFilterer.__init__ is invoked (logging.Handler.__init__ is
    not) - all handler state lives on the underlying handler.
    """

    def __init__(self, underlying: logging.Handler) -> None:  # noqa
        ProxyLogFilterer.__init__(self, underlying)

    _underlying: logging.Handler

    @property
    def underlying(self) -> logging.Handler:
        return self._underlying

    def get_name(self):
        return self._underlying.get_name()

    def set_name(self, name):
        self._underlying.set_name(name)

    @property
    def name(self):
        return self._underlying.name

    @property
    def level(self):
        return self._underlying.level

    @level.setter
    def level(self, level):
        self._underlying.level = level

    @property
    def formatter(self):
        return self._underlying.formatter

    @formatter.setter
    def formatter(self, formatter):
        self._underlying.formatter = formatter

    def createLock(self):
        self._underlying.createLock()

    def acquire(self):
        self._underlying.acquire()

    def release(self):
        self._underlying.release()

    def setLevel(self, level):
        self._underlying.setLevel(level)

    def format(self, record):
        return self._underlying.format(record)

    def emit(self, record):
        self._underlying.emit(record)

    def handle(self, record):
        return self._underlying.handle(record)

    def setFormatter(self, fmt):
        self._underlying.setFormatter(fmt)

    def flush(self):
        self._underlying.flush()

    def close(self):
        self._underlying.close()

    def handleError(self, record):
        self._underlying.handleError(record)
1019
+
1020
+
1021
+ ##
1022
+
1023
+
1024
class StandardLogHandler(ProxyLogHandler):
    """Marker subclass: configure_standard_logging uses it to detect an already-installed handler."""
    pass
1026
+
1027
+
1028
+ ##
1029
+
1030
+
1031
@contextlib.contextmanager
def _locking_logging_module_lock() -> ta.Iterator[None]:
    """Holds the logging module's global lock across python versions.

    Older CPythons expose _acquireLock/_releaseLock; newer ones expose the
    _lock object directly (see the linked commit).
    """
    if hasattr(logging, '_acquireLock'):
        logging._acquireLock()  # noqa
        try:
            yield
        finally:
            logging._releaseLock()  # type: ignore  # noqa

    elif hasattr(logging, '_lock'):
        # https://github.com/python/cpython/commit/74723e11109a320e628898817ab449b3dad9ee96
        with logging._lock:  # noqa
            yield

    else:
        raise Exception("Can't find lock in logging module")
1047
+
1048
+
1049
def configure_standard_logging(
        level: ta.Union[int, str] = logging.INFO,
        *,
        json: bool = False,  # NOTE: shadows the module-level `json` import inside this function
        target: ta.Optional[logging.Logger] = None,
        force: bool = False,
) -> ta.Optional[StandardLogHandler]:
    """Installs a stream handler (plain-text or JSON formatted) on `target` (default: the root logger).

    Returns the wrapping StandardLogHandler, or None when one is already
    installed and `force` is false. The whole operation runs under the
    logging module's global lock.
    """
    with _locking_logging_module_lock():
        if target is None:
            target = logging.root

        # Unless forced, bail out if a previous call already installed a handler.
        if not force:
            if any(isinstance(h, StandardLogHandler) for h in list(target.handlers)):
                return None

        handler = logging.StreamHandler()

        formatter: logging.Formatter
        if json:
            formatter = JsonLogFormatter()
        else:
            formatter = StandardLogFormatter(StandardLogFormatter.build_log_format(STANDARD_LOG_FORMAT_PARTS))
        handler.setFormatter(formatter)

        handler.addFilter(TidLogFilter())

        target.addHandler(handler)

        if level is not None:
            target.setLevel(level)

        # Returned wrapped so callers (and future calls) can identify it.
        return StandardLogHandler(handler)
1095
+
1096
+
1097
+ ########################################
1098
+ # ../../../../../omlish/lite/marshal.py
1099
+ """
1100
+ TODO:
1101
+ - pickle stdlib objs? have to pin to 3.8 pickle protocol, will be cross-version
1102
+ - nonstrict toggle
1103
+ """
1104
+
1105
+
1106
+ ##
1107
+
1108
+
1109
class ObjMarshaler(abc.ABC):
    """Converts between python objects and their JSON-compatible representations."""

    @abc.abstractmethod
    def marshal(self, o: ta.Any) -> ta.Any:
        raise NotImplementedError

    @abc.abstractmethod
    def unmarshal(self, o: ta.Any) -> ta.Any:
        raise NotImplementedError
1117
+
1118
+
1119
class NopObjMarshaler(ObjMarshaler):
    """Identity marshaler: values pass through unchanged in both directions."""

    def marshal(self, o: ta.Any) -> ta.Any:
        return o

    def unmarshal(self, o: ta.Any) -> ta.Any:
        return o
1124
+ return o
1125
+
1126
+
1127
@dc.dataclass()
class ProxyObjMarshaler(ObjMarshaler):
    """Mutable indirection: delegates to a marshaler assigned after construction (check_not_none raises if unset)."""

    m: ta.Optional[ObjMarshaler] = None

    def marshal(self, o: ta.Any) -> ta.Any:
        return check_not_none(self.m).marshal(o)

    def unmarshal(self, o: ta.Any) -> ta.Any:
        return check_not_none(self.m).unmarshal(o)
1136
+
1137
+
1138
@dc.dataclass(frozen=True)
class CastObjMarshaler(ObjMarshaler):
    """Marshals as-is; unmarshals by calling `ty` on the value (used for int/float/str/bool below)."""

    ty: type

    def marshal(self, o: ta.Any) -> ta.Any:
        return o

    def unmarshal(self, o: ta.Any) -> ta.Any:
        return self.ty(o)
1147
+
1148
+
1149
class DynamicObjMarshaler(ObjMarshaler):
    """Marshals by deferring to the module-level marshal_obj; unmarshals as a no-op."""

    def marshal(self, o: ta.Any) -> ta.Any:
        return marshal_obj(o)

    def unmarshal(self, o: ta.Any) -> ta.Any:
        return o
1155
+
1156
+
1157
@dc.dataclass(frozen=True)
class Base64ObjMarshaler(ObjMarshaler):
    """Represents binary data (bytes/bytearray) as base64 ascii strings."""

    ty: type

    def marshal(self, o: ta.Any) -> ta.Any:
        return base64.b64encode(o).decode('ascii')

    def unmarshal(self, o: ta.Any) -> ta.Any:
        return self.ty(base64.b64decode(o))
1166
+
1167
+
1168
@dc.dataclass(frozen=True)
class EnumObjMarshaler(ObjMarshaler):
    """Represents enum members by their name."""

    ty: type

    def marshal(self, o: ta.Any) -> ta.Any:
        return o.name

    def unmarshal(self, o: ta.Any) -> ta.Any:
        return self.ty.__members__[o]  # type: ignore
1177
+
1178
+
1179
@dc.dataclass(frozen=True)
class OptionalObjMarshaler(ObjMarshaler):
    """Wraps another marshaler, passing None through untouched in both directions."""

    item: ObjMarshaler

    def marshal(self, o: ta.Any) -> ta.Any:
        return None if o is None else self.item.marshal(o)

    def unmarshal(self, o: ta.Any) -> ta.Any:
        return None if o is None else self.item.unmarshal(o)
1192
+
1193
+
1194
@dc.dataclass(frozen=True)
class MappingObjMarshaler(ObjMarshaler):
    """Marshals mappings entry-by-entry with key/value marshalers; rebuilds as `ty`."""

    ty: type
    km: ObjMarshaler  # key marshaler
    vm: ObjMarshaler  # value marshaler

    def marshal(self, o: ta.Any) -> ta.Any:
        return {self.km.marshal(k): self.vm.marshal(v) for k, v in o.items()}

    def unmarshal(self, o: ta.Any) -> ta.Any:
        return self.ty((self.km.unmarshal(k), self.vm.unmarshal(v)) for k, v in o.items())
1205
+
1206
+
1207
@dc.dataclass(frozen=True)
class IterableObjMarshaler(ObjMarshaler):
    """Marshals iterables element-by-element to lists; rebuilds as `ty` (list/tuple/set/frozenset below)."""

    ty: type
    item: ObjMarshaler

    def marshal(self, o: ta.Any) -> ta.Any:
        return [self.item.marshal(e) for e in o]

    def unmarshal(self, o: ta.Any) -> ta.Any:
        return self.ty(self.item.unmarshal(e) for e in o)
1217
+
1218
+
1219
@dc.dataclass(frozen=True)
class DataclassObjMarshaler(ObjMarshaler):
    """Marshals dataclass instances field-by-field to/from plain dicts."""

    ty: type
    fs: ta.Mapping[str, ObjMarshaler]  # field name -> field marshaler

    # NOTE(review): with nonstrict=True, a key absent from `fs` passes the
    # filter below but then raises KeyError at `self.fs[k]` - looks unintended;
    # confirm desired nonstrict semantics.
    nonstrict: bool = False

    def marshal(self, o: ta.Any) -> ta.Any:
        return {k: m.marshal(getattr(o, k)) for k, m in self.fs.items()}

    def unmarshal(self, o: ta.Any) -> ta.Any:
        return self.ty(**{k: self.fs[k].unmarshal(v) for k, v in o.items() if self.nonstrict or k in self.fs})
1230
+
1231
+
1232
@dc.dataclass(frozen=True)
class PolymorphicObjMarshaler(ObjMarshaler):
    """Marshals a closed set of types as single-entry dicts: {tag: marshaled_value}."""

    class Impl(ta.NamedTuple):
        ty: type
        tag: str
        m: ObjMarshaler

    impls_by_ty: ta.Mapping[type, Impl]
    impls_by_tag: ta.Mapping[str, Impl]

    def marshal(self, o: ta.Any) -> ta.Any:
        impl = self.impls_by_ty[type(o)]
        return {impl.tag: impl.m.marshal(o)}

    def unmarshal(self, o: ta.Any) -> ta.Any:
        # Expects exactly one {tag: value} entry.
        [(t, v)] = o.items()
        impl = self.impls_by_tag[t]
        return impl.m.unmarshal(v)
1250
+
1251
+
1252
@dc.dataclass(frozen=True)
class DatetimeObjMarshaler(ObjMarshaler):
    """Round-trips date/time/datetime values through their ISO-8601 strings."""

    ty: type  # one of datetime.date / datetime.time / datetime.datetime

    def marshal(self, o: ta.Any) -> ta.Any:
        return o.isoformat()

    def unmarshal(self, o: ta.Any) -> ta.Any:
        return self.ty.fromisoformat(o)  # type: ignore
1261
+
1262
+
1263
class DecimalObjMarshaler(ObjMarshaler):
    """Round-trips decimal.Decimal values through their string form."""

    def marshal(self, o: ta.Any) -> ta.Any:
        d = check_isinstance(o, decimal.Decimal)
        return str(d)

    def unmarshal(self, v: ta.Any) -> ta.Any:
        s = check_isinstance(v, str)
        return decimal.Decimal(s)
1269
+
1270
+
1271
class FractionObjMarshaler(ObjMarshaler):
    """Marshals fractions.Fraction as a two-element [numerator, denominator] list."""

    def marshal(self, o: ta.Any) -> ta.Any:
        frac = check_isinstance(o, fractions.Fraction)
        return [frac.numerator, frac.denominator]

    def unmarshal(self, v: ta.Any) -> ta.Any:
        num, denom = check_isinstance(v, list)
        return fractions.Fraction(num, denom)
1279
+
1280
+
1281
class UuidObjMarshaler(ObjMarshaler):
    """Round-trips uuid.UUID values through their canonical string form."""

    def marshal(self, o: ta.Any) -> ta.Any:
        return str(o)

    def unmarshal(self, o: ta.Any) -> ta.Any:
        return uuid.UUID(o)
1287
+
1288
+
1289
# Default marshaler registry, keyed by type (or typing special form). Extended
# lazily by get_obj_marshaler() and explicitly via register_opj_marshaler().
_OBJ_MARSHALERS: ta.Dict[ta.Any, ObjMarshaler] = {
    **{t: NopObjMarshaler() for t in (type(None),)},
    **{t: CastObjMarshaler(t) for t in (int, float, str, bool)},
    **{t: Base64ObjMarshaler(t) for t in (bytes, bytearray)},
    **{t: IterableObjMarshaler(t, DynamicObjMarshaler()) for t in (list, tuple, set, frozenset)},
    **{t: MappingObjMarshaler(t, DynamicObjMarshaler(), DynamicObjMarshaler()) for t in (dict,)},

    # ta.Any defers entirely to the runtime type of the value.
    ta.Any: DynamicObjMarshaler(),

    **{t: DatetimeObjMarshaler(t) for t in (datetime.date, datetime.time, datetime.datetime)},
    decimal.Decimal: DecimalObjMarshaler(),
    fractions.Fraction: FractionObjMarshaler(),
    uuid.UUID: UuidObjMarshaler(),
}
1303
+
1304
# Maps generic-alias origins to the concrete mapping type instantiated when
# unmarshaling (abstract Mapping protocols collapse to dict).
_OBJ_MARSHALER_GENERIC_MAPPING_TYPES: ta.Dict[ta.Any, type] = {
    **{t: t for t in (dict,)},
    **{t: dict for t in (collections.abc.Mapping, collections.abc.MutableMapping)},
}

# Same for iterable origins; abstract protocols get a concrete default
# (Set -> frozenset, MutableSet -> set, Sequence -> tuple, MutableSequence -> list).
_OBJ_MARSHALER_GENERIC_ITERABLE_TYPES: ta.Dict[ta.Any, type] = {
    **{t: t for t in (list, tuple, set, frozenset)},
    collections.abc.Set: frozenset,
    collections.abc.MutableSet: set,
    collections.abc.Sequence: tuple,
    collections.abc.MutableSequence: list,
}
1316
+
1317
+
1318
def register_opj_marshaler(ty: ta.Any, m: ObjMarshaler) -> None:
    """Explicitly register marshaler m for type ty; KeyError if one already exists.

    NOTE(review): 'opj' looks like a typo for 'obj', but the name is public API -
    renaming would break external callers.
    """
    if ty in _OBJ_MARSHALERS:
        raise KeyError(ty)
    _OBJ_MARSHALERS[ty] = m
1322
+
1323
+
1324
def _make_obj_marshaler(ty: ta.Any) -> ObjMarshaler:
    """Construct (without caching) a marshaler for the given type or type alias.

    Supported: ABC roots (polymorphic over concrete deep subclasses), enums,
    dataclasses, generic mapping/iterable aliases, and Optional-style unions.
    Raises TypeError for anything else.
    """
    if isinstance(ty, type):
        # Classes deriving abc.ABC directly become polymorphic unions over all
        # of their concrete (non-ABC-based) deep subclasses, tagged by qualname.
        if abc.ABC in ty.__bases__:
            impls = [  # type: ignore
                PolymorphicObjMarshaler.Impl(
                    ity,
                    ity.__qualname__,
                    get_obj_marshaler(ity),
                )
                for ity in deep_subclasses(ty)
                if abc.ABC not in ity.__bases__
            ]
            return PolymorphicObjMarshaler(
                {i.ty: i for i in impls},
                {i.tag: i for i in impls},
            )

        if issubclass(ty, enum.Enum):
            return EnumObjMarshaler(ty)

        if dc.is_dataclass(ty):
            # One sub-marshaler per declared field, built recursively.
            return DataclassObjMarshaler(
                ty,
                {f.name: get_obj_marshaler(f.type) for f in dc.fields(ty)},
            )

    if is_generic_alias(ty):
        # e.g. ta.Mapping[K, V] / dict[K, V]
        try:
            mt = _OBJ_MARSHALER_GENERIC_MAPPING_TYPES[ta.get_origin(ty)]
        except KeyError:
            pass
        else:
            k, v = ta.get_args(ty)
            return MappingObjMarshaler(mt, get_obj_marshaler(k), get_obj_marshaler(v))

        # e.g. ta.Sequence[E] / list[E] - single type argument expected.
        try:
            st = _OBJ_MARSHALER_GENERIC_ITERABLE_TYPES[ta.get_origin(ty)]
        except KeyError:
            pass
        else:
            [e] = ta.get_args(ty)
            return IterableObjMarshaler(st, get_obj_marshaler(e))

    if is_union_alias(ty):
        # Only Optional[X]-shaped unions are supported here.
        return OptionalObjMarshaler(get_obj_marshaler(get_optional_alias_arg(ty)))

    raise TypeError(ty)
1371
+
1372
+
1373
def get_obj_marshaler(ty: ta.Any) -> ObjMarshaler:
    """Return the cached marshaler for ty, building and caching it on first use.

    A ProxyObjMarshaler placeholder is registered before construction so that
    recursive / self-referential types can resolve themselves; on success the
    placeholder's target is set and the real marshaler replaces it, on failure
    the placeholder is removed and the error propagates.
    """
    try:
        return _OBJ_MARSHALERS[ty]
    except KeyError:
        pass

    p = ProxyObjMarshaler()
    _OBJ_MARSHALERS[ty] = p
    try:
        m = _make_obj_marshaler(ty)
    except Exception:
        del _OBJ_MARSHALERS[ty]
        raise
    else:
        # Point any already-handed-out proxies at the real marshaler too.
        p.m = m
        _OBJ_MARSHALERS[ty] = m
        return m
1390
+
1391
+
1392
def marshal_obj(o: ta.Any, ty: ta.Any = None) -> ta.Any:
    """Marshal o using the marshaler for ty, defaulting to o's runtime type."""
    if ty is None:
        ty = type(o)
    return get_obj_marshaler(ty).marshal(o)
1394
+
1395
+
1396
def unmarshal_obj(o: ta.Any, ty: ta.Union[ta.Type[T], ta.Any]) -> T:
    """Unmarshal primitive data o into an instance of ty."""
    m = get_obj_marshaler(ty)
    return m.unmarshal(o)
1398
+
1399
+
1400
+ ########################################
1401
+ # ../../../../../omlish/lite/runtime.py
1402
+
1403
+
1404
@cached_nullary
def is_debugger_attached() -> bool:
    """Heuristically detect a pydevd (PyCharm-style) debugger via the call stack."""
    return any(fi.filename.endswith('pydevd.py') for fi in inspect.stack())
1407
+
1408
+
1409
# Minimum interpreter version this script supports.
REQUIRED_PYTHON_VERSION = (3, 8)


def check_runtime_version() -> None:
    """Raise OSError when running on an interpreter older than required."""
    if sys.version_info >= REQUIRED_PYTHON_VERSION:
        return
    raise OSError(
        f'Requires python {REQUIRED_PYTHON_VERSION}, got {sys.version_info} from {sys.executable}')  # noqa
1416
+
1417
+
1418
+ ########################################
1419
+ # ../journald/messages.py
1420
+
1421
+
1422
@dc.dataclass(frozen=True)
class JournalctlMessage:
    """One parsed entry of journalctl's JSON-lines output."""

    raw: bytes  # the original line as read from journalctl
    dct: ta.Optional[ta.Mapping[str, ta.Any]] = None  # parsed JSON, None if parsing failed
    cursor: ta.Optional[str] = None  # journal __CURSOR value, if present
    ts_us: ta.Optional[int] = None  # microseconds UTC
1428
+
1429
+
1430
class JournalctlMessageBuilder:
    """Incrementally parses journalctl JSON-lines output into JournalctlMessage objects.

    Raw bytes are buffered and split on newlines; each complete line is decoded
    as one JSON journal entry. Parse failures are logged and yield a message
    with dct/cursor/ts_us left as None.
    """

    def __init__(self) -> None:
        super().__init__()

        self._buf = DelimitingBuffer(b'\n')

    _cursor_field = '__CURSOR'
    _timestamp_field = '_SOURCE_REALTIME_TIMESTAMP'

    def _make_message(self, raw: bytes) -> JournalctlMessage:
        """Build one message from a complete raw line, tolerating bad input."""
        dct = None
        cursor = None
        ts = None

        try:
            dct = json.loads(raw.decode('utf-8', 'replace'))
        except Exception:  # noqa
            log.exception('Failed to parse raw message: %r', raw)

        else:
            cursor = dct.get(self._cursor_field)

            # The timestamp field may arrive as str, int, or float.
            if tsv := dct.get(self._timestamp_field):
                if isinstance(tsv, str):
                    try:
                        ts = int(tsv)
                    except ValueError:
                        try:
                            ts = int(float(tsv))
                        except ValueError:
                            log.exception('Failed to parse timestamp: %r', tsv)
                elif isinstance(tsv, (int, float)):
                    ts = int(tsv)
                else:
                    # Bug fix: this branch runs with no active exception, so
                    # log.exception() would append a bogus 'NoneType: None'
                    # traceback - plain error logging is correct here.
                    log.error('Invalid timestamp: %r', tsv)

        return JournalctlMessage(
            raw=raw,
            dct=dct,
            cursor=cursor,
            ts_us=ts,
        )

    def feed(self, data: bytes) -> ta.Sequence[JournalctlMessage]:
        """Feed raw bytes; return messages for each newly-completed line."""
        ret: ta.List[JournalctlMessage] = []
        for line in self._buf.feed(data):
            ret.append(self._make_message(check_isinstance(line, bytes)))  # type: ignore
        return ret
1478
+
1479
+
1480
+ ########################################
1481
+ # ../threadworker.py
1482
+
1483
+
1484
class ThreadWorker(abc.ABC):
    """Base class for a worker running in its own thread.

    Subclasses implement _run() and are expected to call _heartbeat()
    periodically, exiting their loop when it returns False.
    """

    def __init__(
            self,
            *,
            stop_event: ta.Optional[threading.Event] = None,
    ) -> None:
        super().__init__()

        # A shared Event may be passed in so multiple workers stop together.
        if stop_event is None:
            stop_event = threading.Event()
        self._stop_event = stop_event

        self._thread: ta.Optional[threading.Thread] = None

        # Wall-clock time of the most recent _heartbeat() call, for liveness checks.
        self._last_heartbeat: ta.Optional[float] = None

    #

    def should_stop(self) -> bool:
        return self._stop_event.is_set()

    #

    @property
    def last_heartbeat(self) -> ta.Optional[float]:
        return self._last_heartbeat

    def _heartbeat(self) -> bool:
        """Record liveness; return False if the worker should shut down."""
        self._last_heartbeat = time.time()

        if self.should_stop():
            log.info('Stopping: %s', self)
            return False

        return True

    #

    def is_alive(self) -> bool:
        return (thr := self._thread) is not None and thr.is_alive()

    def start(self) -> None:
        thr = threading.Thread(target=self._run)
        self._thread = thr
        thr.start()

    @abc.abstractmethod
    def _run(self) -> None:
        raise NotImplementedError

    def stop(self) -> None:
        # NOTE(review): deliberately(?) unimplemented - callers must set the
        # shared stop_event themselves. TODO confirm intent.
        raise NotImplementedError

    def cleanup(self) -> None:  # noqa
        # Optional hook for subclasses; default is a no-op.
        pass
1539
+
1540
+
1541
+ ########################################
1542
+ # ../../logs.py
1543
+ """
1544
+ https://docs.aws.amazon.com/AmazonCloudWatchLogs/latest/APIReference/API_PutLogEvents.html :
1545
+ - The maximum batch size is 1,048,576 bytes. This size is calculated as the sum of all event messages in UTF-8, plus 26
1546
+ bytes for each log event.
1547
+ - None of the log events in the batch can be more than 2 hours in the future.
1548
+ - None of the log events in the batch can be more than 14 days in the past. Also, none of the log events can be from
1549
+ earlier than the retention period of the log group.
1550
+ - The log events in the batch must be in chronological order by their timestamp. The timestamp is the time that the
1551
+ event occurred, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC. (In AWS Tools for PowerShell
1552
+ and the AWS SDK for .NET, the timestamp is specified in .NET format: yyyy-mm-ddThh:mm:ss. For example,
1553
+ 2017-09-15T13:45:30.)
1554
+ - A batch of log events in a single request cannot span more than 24 hours. Otherwise, the operation fails.
1555
+ - Each log event can be no larger than 256 KB.
1556
+ - The maximum number of log events in a batch is 10,000.
1557
+ """
1558
+
1559
+
1560
+ ##
1561
+
1562
+
1563
@dc.dataclass(frozen=True)
class AwsLogEvent(AwsDataclass):
    """One CloudWatch Logs event, matching the PutLogEvents 'logEvents' element."""

    message: str
    timestamp: int  # milliseconds UTC
1567
+
1568
+
1569
@dc.dataclass(frozen=True)
class AwsPutLogEventsRequest(AwsDataclass):
    """Request body for the CloudWatch Logs PutLogEvents action."""

    log_group_name: str
    log_stream_name: str
    log_events: ta.Sequence[AwsLogEvent]
    sequence_token: ta.Optional[str] = None
1575
+
1576
+
1577
@dc.dataclass(frozen=True)
class AwsRejectedLogEventsInfo(AwsDataclass):
    """PutLogEvents response detail describing events AWS refused to store."""

    expired_log_event_end_index: ta.Optional[int] = None
    too_new_log_event_start_index: ta.Optional[int] = None
    too_old_log_event_end_index: ta.Optional[int] = None
1582
+
1583
+
1584
@dc.dataclass(frozen=True)
class AwsPutLogEventsResponse(AwsDataclass):
    """Response body of the CloudWatch Logs PutLogEvents action."""

    next_sequence_token: ta.Optional[str] = None
    rejected_log_events_info: ta.Optional[AwsRejectedLogEventsInfo] = None

    # Full untouched response payload, for fields not modeled above.
    raw: ta.Optional[AwsDataclass.Raw] = None
1590
+
1591
+
1592
+ ##
1593
+
1594
+
1595
class AwsLogMessagePoster:
    """
    Builds fully-signed HTTP POST requests that ship log messages to the AWS
    CloudWatch Logs PutLogEvents API. This class only constructs requests;
    actually sending them is left to the caller.

    TODO:
     - max_items
     - max_bytes - manually build body
     - flush_interval
     - !! sort by timestamp
    """

    DEFAULT_URL = 'https://logs.{region_name}.amazonaws.com/'  # noqa

    DEFAULT_SERVICE_NAME = 'logs'

    DEFAULT_TARGET = 'Logs_20140328.PutLogEvents'
    DEFAULT_CONTENT_TYPE = 'application/x-amz-json-1.1'

    DEFAULT_HEADERS: ta.Mapping[str, str] = {
        'X-Amz-Target': DEFAULT_TARGET,
        'Content-Type': DEFAULT_CONTENT_TYPE,
    }

    def __init__(
            self,
            log_group_name: str,
            log_stream_name: str,
            region_name: str,
            credentials: AwsSigner.Credentials,

            url: ta.Optional[str] = None,
            service_name: str = DEFAULT_SERVICE_NAME,
            headers: ta.Optional[ta.Mapping[str, str]] = None,
            extra_headers: ta.Optional[ta.Mapping[str, str]] = None,
    ) -> None:
        super().__init__()

        self._log_group_name = check_non_empty_str(log_group_name)
        self._log_stream_name = check_non_empty_str(log_stream_name)

        if url is None:
            url = self.DEFAULT_URL.format(region_name=region_name)
        self._url = url

        if headers is None:
            headers = self.DEFAULT_HEADERS
        if extra_headers is not None:
            headers = {**headers, **extra_headers}
        # Headers are stored list-valued; feed() flattens them back to single
        # strings via check_single() when building the final Post.
        self._headers = {k: [v] for k, v in headers.items()}

        self._signer = V4AwsSigner(
            credentials,
            region_name,
            service_name,
        )

    #

    @dc.dataclass(frozen=True)
    class Message:
        # Input unit: one log line and its timestamp.
        message: str
        ts_ms: int  # milliseconds UTC

    @dc.dataclass(frozen=True)
    class Post:
        # Output unit: a ready-to-send, signed HTTP POST.
        url: str
        headers: ta.Mapping[str, str]
        data: bytes

    def feed(self, messages: ta.Sequence[Message]) -> ta.Sequence[Post]:
        """Build the signed Post(s) for a batch of messages; empty batch -> []."""
        if not messages:
            return []

        payload = AwsPutLogEventsRequest(
            log_group_name=self._log_group_name,
            log_stream_name=self._log_stream_name,
            log_events=[
                AwsLogEvent(
                    message=m.message,
                    timestamp=m.ts_ms,
                )
                for m in messages
            ],
        )

        # Compact JSON body - no indentation or separator whitespace.
        body = json.dumps(
            payload.to_aws(),
            indent=None,
            separators=(',', ':'),
        ).encode('utf-8')

        sig_req = V4AwsSigner.Request(
            method='POST',
            url=self._url,
            headers=self._headers,
            payload=body,
        )

        sig_headers = self._signer.sign(
            sig_req,
            sign_payload=False,
        )
        sig_req = dc.replace(sig_req, headers={**sig_req.headers, **sig_headers})

        post = AwsLogMessagePoster.Post(
            url=self._url,
            headers={k: check_single(v) for k, v in sig_req.headers.items()},
            data=sig_req.payload,
        )

        return [post]
1704
+
1705
+
1706
+ ########################################
1707
+ # ../../../../../omlish/lite/subprocesses.py
1708
+
1709
+
1710
+ ##
1711
+
1712
+
1713
# Global toggle: force every exec through 'sh -c' (useful under debuggers).
_SUBPROCESS_SHELL_WRAP_EXECS = False


def subprocess_shell_wrap_exec(*args: str) -> ta.Tuple[str, ...]:
    """Return an argv that runs *args* through 'sh -c', shell-quoting each arg."""
    quoted = ' '.join(shlex.quote(a) for a in args)
    return ('sh', '-c', quoted)
1718
+
1719
+
1720
def subprocess_maybe_shell_wrap_exec(*args: str) -> ta.Tuple[str, ...]:
    """Shell-wrap args only when globally enabled or a debugger is attached."""
    if not (_SUBPROCESS_SHELL_WRAP_EXECS or is_debugger_attached()):
        return args
    return subprocess_shell_wrap_exec(*args)
1725
+
1726
+
1727
def _prepare_subprocess_invocation(
        *args: str,
        env: ta.Optional[ta.Mapping[str, ta.Any]] = None,
        extra_env: ta.Optional[ta.Mapping[str, ta.Any]] = None,
        quiet: bool = False,
        shell: bool = False,
        **kwargs: ta.Any,
) -> ta.Tuple[ta.Tuple[ta.Any, ...], ta.Dict[str, ta.Any]]:
    """Normalize the (args, kwargs) shared by all subprocess_* helpers.

    extra_env entries are overlaid on env (or os.environ if env is None);
    quiet discards stderr unless DEBUG logging is enabled; non-shell
    invocations may be shell-wrapped for debugger friendliness.
    """
    log.debug(args)
    if extra_env:
        log.debug(extra_env)

    if extra_env:
        env = {**(env if env is not None else os.environ), **extra_env}

    # Respect an explicitly-passed stderr; only default it when quiet.
    if quiet and 'stderr' not in kwargs:
        if not log.isEnabledFor(logging.DEBUG):
            kwargs['stderr'] = subprocess.DEVNULL

    if not shell:
        args = subprocess_maybe_shell_wrap_exec(*args)

    return args, dict(
        env=env,
        shell=shell,
        **kwargs,
    )
1754
+
1755
+
1756
def subprocess_check_call(*args: str, stdout=sys.stderr, **kwargs: ta.Any) -> None:
    """subprocess.check_call with the shared invocation preparation applied."""
    full_args, full_kwargs = _prepare_subprocess_invocation(*args, stdout=stdout, **kwargs)
    return subprocess.check_call(full_args, **full_kwargs)  # type: ignore
1759
+
1760
+
1761
def subprocess_check_output(*args: str, **kwargs: ta.Any) -> bytes:
    """subprocess.check_output with the shared invocation preparation applied."""
    full_args, full_kwargs = _prepare_subprocess_invocation(*args, **kwargs)
    return subprocess.check_output(full_args, **full_kwargs)
1764
+
1765
+
1766
def subprocess_check_output_str(*args: str, **kwargs: ta.Any) -> str:
    """Run the command and return its stdout decoded and whitespace-stripped."""
    out = subprocess_check_output(*args, **kwargs)
    return out.decode().strip()
1768
+
1769
+
1770
+ ##
1771
+
1772
+
1773
# Exception types the subprocess_try_* helpers treat as "expected" failures:
# the executable being absent, or the command exiting nonzero.
DEFAULT_SUBPROCESS_TRY_EXCEPTIONS: ta.Tuple[ta.Type[Exception], ...] = (
    FileNotFoundError,
    subprocess.CalledProcessError,
)
1777
+
1778
+
1779
def subprocess_try_call(
        *args: str,
        try_exceptions: ta.Tuple[ta.Type[Exception], ...] = DEFAULT_SUBPROCESS_TRY_EXCEPTIONS,
        **kwargs: ta.Any,
) -> bool:
    """Run the command; True on success, False on an expected failure."""
    try:
        subprocess_check_call(*args, **kwargs)
    except try_exceptions as e:  # noqa
        if log.isEnabledFor(logging.DEBUG):
            log.exception('command failed')
        return False
    return True
1792
+
1793
+
1794
def subprocess_try_output(
        *args: str,
        try_exceptions: ta.Tuple[ta.Type[Exception], ...] = DEFAULT_SUBPROCESS_TRY_EXCEPTIONS,
        **kwargs: ta.Any,
) -> ta.Optional[bytes]:
    """Run the command; return its stdout bytes, or None on an expected failure."""
    try:
        return subprocess_check_output(*args, **kwargs)
    except try_exceptions as e:  # noqa
        if log.isEnabledFor(logging.DEBUG):
            log.exception('command failed')
        return None
1805
+
1806
+
1807
def subprocess_try_output_str(*args: str, **kwargs: ta.Any) -> ta.Optional[str]:
    """Like subprocess_try_output, but decoding and stripping the output."""
    if (out := subprocess_try_output(*args, **kwargs)) is None:
        return None
    return out.decode().strip()
1810
+
1811
+
1812
+ ########################################
1813
+ # ../journald/tailer.py
1814
+
1815
+
1816
class JournalctlTailerWorker(ThreadWorker):
    """Thread worker that tails `journalctl --follow` and pushes batches of
    parsed messages onto an output queue."""

    DEFAULT_CMD: ta.ClassVar[ta.Sequence[str]] = ['journalctl']

    def __init__(
            self,
            output,  # type: queue.Queue[ta.Sequence[JournalctlMessage]]
            *,
            since: ta.Optional[str] = None,  # forwarded as journalctl --since
            after_cursor: ta.Optional[str] = None,  # forwarded as --after-cursor

            cmd: ta.Optional[ta.Sequence[str]] = None,  # base argv; defaults to ['journalctl']
            shell_wrap: bool = False,  # wrap the command in 'sh -c'

            read_size: int = 0x4000,  # bytes per pipe read
            sleep_s: float = 1.,  # poll interval between read rounds

            **kwargs: ta.Any,
    ) -> None:
        super().__init__(**kwargs)

        self._output = output

        self._since = since
        self._after_cursor = after_cursor

        self._cmd = cmd or self.DEFAULT_CMD
        self._shell_wrap = shell_wrap

        self._read_size = read_size
        self._sleep_s = sleep_s

        self._mb = JournalctlMessageBuilder()

        self._proc: ta.Optional[subprocess.Popen] = None

    @cached_nullary
    def _full_cmd(self) -> ta.Sequence[str]:
        """Assemble the full journalctl argv (computed once and cached)."""
        cmd = [
            *self._cmd,
            '--output', 'json',
            '--show-cursor',
            '--follow',
        ]

        if self._since is not None:
            cmd.extend(['--since', self._since])

        if self._after_cursor is not None:
            cmd.extend(['--after-cursor', self._after_cursor])

        if self._shell_wrap:
            cmd = list(subprocess_shell_wrap_exec(*cmd))

        return cmd

    def _run(self) -> None:
        """Spawn journalctl and pump its stdout into the output queue until
        stopped or the process exits."""
        with subprocess.Popen(
            self._full_cmd(),
            stdout=subprocess.PIPE,
        ) as self._proc:
            stdout = check_not_none(self._proc.stdout)

            # Make the pipe non-blocking so read() returns promptly even when
            # journalctl has produced no new output.
            fd = stdout.fileno()
            fl = fcntl.fcntl(fd, fcntl.F_GETFL)
            fcntl.fcntl(fd, fcntl.F_SETFL, fl | os.O_NONBLOCK)

            while True:
                if not self._heartbeat():
                    break

                # Drain everything currently available before sleeping.
                while stdout.readable():
                    if not self._heartbeat():
                        break

                    buf = stdout.read(self._read_size)
                    if not buf:
                        log.debug('Journalctl empty read')
                        break

                    log.debug('Journalctl read buffer: %r', buf)
                    msgs = self._mb.feed(buf)
                    if msgs:
                        self._output.put(msgs)

                # A dead journalctl means no further input will ever arrive.
                if self._proc.poll() is not None:
                    log.critical('Journalctl process terminated')
                    break

                log.debug('Journalctl readable')
                time.sleep(self._sleep_s)
1906
+
1907
+
1908
+ ########################################
1909
+ # main.py
1910
+
1911
+
1912
@dc.dataclass(frozen=True)
class JournalctlOpts:
    """Options mirroring journalctl's cursor / time-window CLI flags."""

    after_cursor: ta.Optional[str] = None

    since: ta.Optional[str] = None
    until: ta.Optional[str] = None
1918
+
1919
+
1920
class JournalctlToAws:
    """Top-level service object: tails journalctl in a worker thread, posts each
    batch of messages to AWS CloudWatch Logs, and persists the journal cursor
    between runs. Use as a context manager."""

    @dc.dataclass(frozen=True)
    class Config:
        pid_file: ta.Optional[str] = None  # exclusive-run pidfile path, optional

        cursor_file: ta.Optional[str] = None  # where to persist the journal cursor

        #

        aws_log_group_name: str = 'omlish'
        aws_log_stream_name: ta.Optional[str] = None

        aws_access_key_id: ta.Optional[str] = None
        aws_secret_access_key: ta.Optional[str] = dc.field(default=None, repr=False)

        aws_region_name: str = 'us-west-1'

        #

        journalctl_cmd: ta.Optional[ta.Sequence[str]] = None  # override journalctl argv

        journalctl_after_cursor: ta.Optional[str] = None
        journalctl_since: ta.Optional[str] = None

        #

        dry_run: bool = False  # build and log posts but do not send them

    def __init__(self, config: Config) -> None:
        super().__init__()
        self._config = config

    #

    _es: contextlib.ExitStack

    def __enter__(self) -> 'JournalctlToAws':
        self._es = contextlib.ExitStack().__enter__()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        return self._es.__exit__(exc_type, exc_val, exc_tb)

    #

    @cached_nullary
    def _pidfile(self) -> ta.Optional[Pidfile]:
        """Open (and keep holding, via the exit stack) the configured pidfile."""
        if self._config.pid_file is None:
            return None

        pfp = os.path.expanduser(self._config.pid_file)

        log.info('Opening pidfile %s', pfp)

        pf = self._es.enter_context(Pidfile(pfp))
        pf.write()
        return pf

    def _ensure_locked(self) -> None:
        # No-op when no pidfile is configured.
        if (pf := self._pidfile()) is not None:
            pf.ensure_locked()

    #

    def _read_cursor_file(self) -> ta.Optional[str]:
        """Return the persisted cursor, or None if unconfigured or absent."""
        self._ensure_locked()

        if not (cf := self._config.cursor_file):
            return None
        cf = os.path.expanduser(cf)

        try:
            with open(cf) as f:
                return f.read().strip()
        except FileNotFoundError:
            return None

    def _write_cursor_file(self, cursor: str) -> None:
        """Persist the cursor via write-to-temp then rename, so the update is atomic."""
        self._ensure_locked()

        if not (cf := self._config.cursor_file):
            return
        cf = os.path.expanduser(cf)

        log.info('Writing cursor file %s : %s', cf, cursor)
        with open(ncf := cf + '.next', 'w') as f:
            f.write(cursor)

        os.rename(ncf, cf)

    #

    @cached_nullary
    def _aws_credentials(self) -> AwsSigner.Credentials:
        # Both credential halves are required by this point.
        return AwsSigner.Credentials(
            access_key_id=check_non_empty_str(self._config.aws_access_key_id),
            secret_access_key=check_non_empty_str(self._config.aws_secret_access_key),
        )

    @cached_nullary
    def _aws_log_message_poster(self) -> AwsLogMessagePoster:
        return AwsLogMessagePoster(
            log_group_name=self._config.aws_log_group_name,
            log_stream_name=check_non_empty_str(self._config.aws_log_stream_name),
            region_name=self._config.aws_region_name,
            credentials=check_not_none(self._aws_credentials()),
        )

    #

    @cached_nullary
    def _journalctl_message_queue(self):  # type: () -> queue.Queue[ta.Sequence[JournalctlMessage]]
        return queue.Queue()

    @cached_nullary
    def _journalctl_tailer_worker(self) -> JournalctlTailerWorker:
        # Explicit config cursor takes precedence over the persisted cursor file.
        ac: ta.Optional[str] = self._config.journalctl_after_cursor
        if ac is None:
            ac = self._read_cursor_file()
        if ac is not None:
            log.info('Starting from cursor %s', ac)

        if (since := self._config.journalctl_since):
            log.info('Starting since %s', since)

        return JournalctlTailerWorker(
            self._journalctl_message_queue(),

            since=since,
            after_cursor=ac,

            cmd=self._config.journalctl_cmd,
            shell_wrap=is_debugger_attached(),
        )

    #

    def run(self) -> None:
        """Main loop: drain the tailer queue, post each batch, persist cursors."""
        self._ensure_locked()

        q = self._journalctl_message_queue()
        jtw = self._journalctl_tailer_worker()
        mp = self._aws_log_message_poster()

        jtw.start()

        last_cursor: ta.Optional[str] = None  # noqa
        while True:
            if not jtw.is_alive():
                log.critical('Journalctl tailer worker died')
                break

            msgs: ta.Sequence[JournalctlMessage] = q.get()
            log.debug('%r', msgs)

            # The latest cursor present in the batch wins.
            cur_cursor: ta.Optional[str] = None
            for m in reversed(msgs):
                if m.cursor is not None:
                    cur_cursor = m.cursor
                    break

            if not msgs:
                log.warning('Empty queue chunk')
                continue

            # NOTE(review): events are stamped with the post time, not the
            # journal entry's own timestamp - presumably intentional; confirm.
            [post] = mp.feed([mp.Message(
                message=json.dumps(m.dct),
                ts_ms=int(time.time() * 1000.),
            ) for m in msgs])
            log.debug('%r', post)

            if not self._config.dry_run:
                with urllib.request.urlopen(urllib.request.Request(  # noqa
                        post.url,
                        method='POST',
                        headers=dict(post.headers),
                        data=post.data,
                )) as resp:
                    response = AwsPutLogEventsResponse.from_aws(json.loads(resp.read().decode('utf-8')))
                    log.debug('%r', response)

            # Only persist the cursor after the batch was (or would be) posted.
            if cur_cursor is not None:
                self._write_cursor_file(cur_cursor)
                last_cursor = cur_cursor  # noqa
2104
+
2105
+
2106
def _main() -> None:
    """CLI entry point: parse args, assemble the config, and run the forwarder."""
    parser = argparse.ArgumentParser()

    parser.add_argument('--config-file')
    parser.add_argument('-v', '--verbose', action='store_true')

    parser.add_argument('--after-cursor', nargs='?')
    parser.add_argument('--since', nargs='?')
    parser.add_argument('--dry-run', action='store_true')

    parser.add_argument('--message', nargs='?')
    parser.add_argument('--real', action='store_true')

    args = parser.parse_args()

    #

    configure_standard_logging('DEBUG' if args.verbose else 'INFO')

    #

    # Load config from JSON file if given, otherwise use defaults.
    config: JournalctlToAws.Config
    if args.config_file:
        with open(os.path.expanduser(args.config_file)) as cf:
            config_dct = json.load(cf)
        config = unmarshal_obj(config_dct, JournalctlToAws.Config)
    else:
        config = JournalctlToAws.Config()

    #

    # Fall back to AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY environment
    # variables when the config leaves the credentials unset.
    for k in ['aws_access_key_id', 'aws_secret_access_key']:
        if not getattr(config, k) and k.upper() in os.environ:
            config = dc.replace(config, **{k: os.environ.get(k.upper())})  # type: ignore

    #

    # Without --real, tail a bundled test-message generator instead of journalctl.
    if not args.real:
        config = dc.replace(config, journalctl_cmd=[
            sys.executable,
            os.path.join(os.path.dirname(__file__), 'journald', 'genmessages.py'),
            '--sleep-n', '2',
            '--sleep-s', '.5',
            *(['--message', args.message] if args.message else []),
            '100000',
        ])

    #

    # Command-line flags override the corresponding config fields when set.
    for a in ['after_cursor', 'since', 'dry_run']:
        if (pa := getattr(args, a)):
            config = dc.replace(config, **{a: pa})

    #

    with JournalctlToAws(config) as jta:
        jta.run()


if __name__ == '__main__':
    _main()