ominfra 0.0.0.dev77__py3-none-any.whl → 0.0.0.dev79__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,3 +1,5 @@
+ # ruff: noqa: UP006 UP007
+ # @omlish-lite
  """
  https://docs.aws.amazon.com/IAM/latest/UserGuide/create-signed-request.html
 
@@ -6,6 +8,7 @@ TODO:
   - boto / s3transfer upload_fileobj doesn't stream either lol - eagerly calcs Content-MD5
   - sts tokens
   - !! fix canonical_qs - sort params
+  - secrets
  """
  import dataclasses as dc
  import datetime
@@ -14,123 +17,125 @@ import hmac
  import typing as ta
  import urllib.parse
 
- from omlish import check
- from omlish import lang
+ from omlish.lite.check import check_equal
+ from omlish.lite.check import check_non_empty_str
+ from omlish.lite.check import check_not_isinstance
 
 
  ##
 
 
- HttpMap: ta.TypeAlias = ta.Mapping[str, ta.Sequence[str]]
-
-
- def make_http_map(*kvs: tuple[str, str]) -> HttpMap:
-     out: dict[str, list[str]] = {}
-     for k, v in kvs:
-         out.setdefault(k, []).append(v)
-     return out
-
-
- #
-
- @dc.dataclass(frozen=True)
- class Credentials:
-     access_key: str
-     secret_key: str = dc.field(repr=False)
-
+ class AwsSigner:
+     def __init__(
+             self,
+             creds: 'AwsSigner.Credentials',
+             region_name: str,
+             service_name: str,
+     ) -> None:
+         super().__init__()
+         self._creds = creds
+         self._region_name = region_name
+         self._service_name = service_name
 
- @dc.dataclass(frozen=True)
- class Request:
-     method: str
-     url: str
-     headers: HttpMap = dc.field(default_factory=dict)
-     payload: bytes = b''
+     #
 
+     @dc.dataclass(frozen=True)
+     class Credentials:
+         access_key: str
+         secret_key: str = dc.field(repr=False)
 
- ##
+     @dc.dataclass(frozen=True)
+     class Request:
+         method: str
+         url: str
+         headers: ta.Mapping[str, ta.Sequence[str]] = dc.field(default_factory=dict)
+         payload: bytes = b''
 
+     #
 
- def _host_from_url(url: str) -> str:
-     url_parts = urllib.parse.urlsplit(url)
-     host = check.non_empty_str(url_parts.hostname)
-     default_ports = {
-         'http': 80,
-         'https': 443,
-     }
-     if url_parts.port is not None:
-         if url_parts.port != default_ports.get(url_parts.scheme):
-             host = '%s:%d' % (host, url_parts.port)
-     return host
+     ISO8601 = '%Y%m%dT%H%M%SZ'
 
+     #
 
- def _as_bytes(data: str | bytes) -> bytes:
-     return data if isinstance(data, bytes) else data.encode('utf-8')
+     @staticmethod
+     def _host_from_url(url: str) -> str:
+         url_parts = urllib.parse.urlsplit(url)
+         host = check_non_empty_str(url_parts.hostname)
+         default_ports = {
+             'http': 80,
+             'https': 443,
+         }
+         if url_parts.port is not None:
+             if url_parts.port != default_ports.get(url_parts.scheme):
+                 host = '%s:%d' % (host, url_parts.port)
+         return host
 
+     @staticmethod
+     def _lower_case_http_map(d: ta.Mapping[str, ta.Sequence[str]]) -> ta.Mapping[str, ta.Sequence[str]]:
+         o: ta.Dict[str, ta.List[str]] = {}
+         for k, vs in d.items():
+             o.setdefault(k.lower(), []).extend(check_not_isinstance(vs, str))
+         return o
 
- def _sha256(data: str | bytes) -> str:
-     return hashlib.sha256(_as_bytes(data)).hexdigest()
+     #
 
+     @staticmethod
+     def _as_bytes(data: ta.Union[str, bytes]) -> bytes:
+         return data if isinstance(data, bytes) else data.encode('utf-8')
 
- def _sha256_sign(key: bytes, msg: str | bytes) -> bytes:
-     return hmac.new(key, _as_bytes(msg), hashlib.sha256).digest()
+     @staticmethod
+     def _sha256(data: ta.Union[str, bytes]) -> str:
+         return hashlib.sha256(AwsSigner._as_bytes(data)).hexdigest()
 
+     @staticmethod
+     def _sha256_sign(key: bytes, msg: ta.Union[str, bytes]) -> bytes:
+         return hmac.new(key, AwsSigner._as_bytes(msg), hashlib.sha256).digest()
 
- def _sha256_sign_hex(key: bytes, msg: str | bytes) -> str:
-     return hmac.new(key, _as_bytes(msg), hashlib.sha256).hexdigest()
+     @staticmethod
+     def _sha256_sign_hex(key: bytes, msg: ta.Union[str, bytes]) -> str:
+         return hmac.new(key, AwsSigner._as_bytes(msg), hashlib.sha256).hexdigest()
 
+     _EMPTY_SHA256: str
 
- _EMPTY_SHA256 = _sha256(b'')
+     #
 
- _ISO8601 = '%Y%m%dT%H%M%SZ'
+     _SIGNED_HEADERS_BLACKLIST = frozenset([
+         'authorization',
+         'expect',
+         'user-agent',
+         'x-amzn-trace-id',
+     ])
 
- _SIGNED_HEADERS_BLACKLIST = frozenset([
-     'expect',
-     'user-agent',
-     'x-amzn-trace-id',
- ])
+     def _validate_request(self, req: Request) -> None:
+         check_non_empty_str(req.method)
+         check_equal(req.method.upper(), req.method)
+         for k, vs in req.headers.items():
+             check_equal(k.strip(), k)
+             for v in vs:
+                 check_equal(v.strip(), v)
 
 
- def _lower_case_http_map(d: HttpMap) -> HttpMap:
-     o: dict[str, list[str]] = {}
-     for k, vs in d.items():
-         o.setdefault(k.lower(), []).extend(vs)
-     return o
+ AwsSigner._EMPTY_SHA256 = AwsSigner._sha256(b'')  # noqa
 
 
- class V4AwsSigner:
-     def __init__(
-             self,
-             creds: Credentials,
-             region_name: str,
-             service_name: str,
-     ) -> None:
-         super().__init__()
-         self._creds = creds
-         self._region_name = region_name
-         self._service_name = service_name
+ ##
 
-     def _validate_request(self, req: Request) -> None:
-         check.non_empty_str(req.method)
-         check.equal(req.method.upper(), req.method)
-         for k, vs in req.headers.items():
-             check.equal(k.strip(), k)
-             for v in vs:
-                 check.equal(v.strip(), v)
 
+ class V4AwsSigner(AwsSigner):
      def sign(
              self,
-             req: Request,
+             req: AwsSigner.Request,
              *,
              sign_payload: bool = False,
-             utcnow: datetime.datetime | None = None,
-     ) -> HttpMap:
+             utcnow: ta.Optional[datetime.datetime] = None,
+     ) -> ta.Mapping[str, ta.Sequence[str]]:
          self._validate_request(req)
 
          #
 
          if utcnow is None:
-             utcnow = lang.utcnow()
-         req_dt = utcnow.strftime(_ISO8601)
+             utcnow = datetime.datetime.now(tz=datetime.timezone.utc)  # noqa
+         req_dt = utcnow.strftime(self.ISO8601)
 
          #
 
@@ -140,18 +145,18 @@ class V4AwsSigner:
 
          #
 
-         headers_to_sign = {
-             k: v
-             for k, v in _lower_case_http_map(req.headers).items()
-             if k not in _SIGNED_HEADERS_BLACKLIST
+         headers_to_sign: ta.Dict[str, ta.List[str]] = {
+             k: list(v)
+             for k, v in self._lower_case_http_map(req.headers).items()
+             if k not in self._SIGNED_HEADERS_BLACKLIST
          }
 
          if 'host' not in headers_to_sign:
-             headers_to_sign['host'] = [_host_from_url(req.url)]
+             headers_to_sign['host'] = [self._host_from_url(req.url)]
 
          headers_to_sign['x-amz-date'] = [req_dt]
 
-         hashed_payload = _sha256(req.payload) if req.payload else _EMPTY_SHA256
+         hashed_payload = self._sha256(req.payload) if req.payload else self._EMPTY_SHA256
          if sign_payload:
              headers_to_sign['x-amz-content-sha256'] = [hashed_payload]
 
@@ -183,7 +188,7 @@ class V4AwsSigner:
              'aws4_request',
          ]
          scope = '/'.join(scope_parts)
-         hashed_canon_req = _sha256(canon_req)
+         hashed_canon_req = self._sha256(canon_req)
          string_to_sign = '\n'.join([
              algorithm,
              req_dt,
@@ -194,11 +199,11 @@
          #
 
          key = self._creds.secret_key
-         key_date = _sha256_sign(f'AWS4{key}'.encode('utf-8'), req_dt[:8])  # noqa
-         key_region = _sha256_sign(key_date, self._region_name)
-         key_service = _sha256_sign(key_region, self._service_name)
-         key_signing = _sha256_sign(key_service, 'aws4_request')
-         sig = _sha256_sign_hex(key_signing, string_to_sign)
+         key_date = self._sha256_sign(f'AWS4{key}'.encode('utf-8'), req_dt[:8])  # noqa
+         key_region = self._sha256_sign(key_date, self._region_name)
+         key_service = self._sha256_sign(key_region, self._service_name)
+         key_signing = self._sha256_sign(key_service, 'aws4_request')
+         sig = self._sha256_sign_hex(key_signing, string_to_sign)
 
          #
 
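
For orientation, a usage sketch of the signer as refactored above (not part of the diff; the credentials, region, service, and URL are illustrative placeholders):

    from ominfra.clouds.aws.auth import AwsSigner
    from ominfra.clouds.aws.auth import V4AwsSigner

    signer = V4AwsSigner(
        AwsSigner.Credentials(
            access_key='AKIA...',  # placeholder
            secret_key='...',  # placeholder
        ),
        'us-east-1',
        's3',
    )

    sig_headers = signer.sign(
        AwsSigner.Request(
            method='GET',
            url='https://s3.us-east-1.amazonaws.com/my-bucket/my-key',
        ),
        sign_payload=False,
    )
    # sig_headers maps header names to value lists (e.g. the date and
    # authorization headers) to be merged into the outgoing request.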
@@ -0,0 +1,149 @@
+ # ruff: noqa: UP006 UP007
+ # @omlish-lite
+ import collections.abc
+ import dataclasses as dc
+ import typing as ta
+
+ from omlish.lite.cached import cached_nullary
+ from omlish.lite.reflect import get_optional_alias_arg
+ from omlish.lite.reflect import is_generic_alias
+ from omlish.lite.reflect import is_optional_alias
+ from omlish.lite.strings import camel_case
+
+
+ class AwsDataclass:
+     class Raw(dict):
+         pass
+
+     #
+
+     _aws_meta: ta.ClassVar[ta.Optional['AwsDataclassMeta']] = None
+
+     @classmethod
+     def _get_aws_meta(cls) -> 'AwsDataclassMeta':
+         try:
+             return cls.__dict__['_aws_meta']
+         except KeyError:
+             pass
+         ret = cls._aws_meta = AwsDataclassMeta(cls)
+         return ret
+
+     #
+
+     def to_aws(self) -> ta.Mapping[str, ta.Any]:
+         return self._get_aws_meta().converters().d2a(self)
+
+     @classmethod
+     def from_aws(cls, v: ta.Mapping[str, ta.Any]) -> 'AwsDataclass':
+         return cls._get_aws_meta().converters().a2d(v)
+
+
+ @dc.dataclass(frozen=True)
+ class AwsDataclassMeta:
+     cls: ta.Type['AwsDataclass']
+
+     #
+
+     class Field(ta.NamedTuple):
+         d_name: str
+         a_name: str
+         is_opt: bool
+         is_seq: bool
+         dc_cls: ta.Optional[ta.Type['AwsDataclass']]
+
+     @cached_nullary
+     def fields(self) -> ta.Sequence[Field]:
+         fs = []
+         for f in dc.fields(self.cls):  # type: ignore  # noqa
+             d_name = f.name
+             a_name = camel_case(d_name, lower=True)
+
+             is_opt = False
+             is_seq = False
+             dc_cls = None
+
+             c = f.type
+             if c is AwsDataclass.Raw:
+                 continue
+
+             if is_optional_alias(c):
+                 is_opt = True
+                 c = get_optional_alias_arg(c)
+
+             if is_generic_alias(c) and ta.get_origin(c) is collections.abc.Sequence:
+                 is_seq = True
+                 [c] = ta.get_args(c)
+
+             if is_generic_alias(c):
+                 raise TypeError(c)
+
+             if isinstance(c, type) and issubclass(c, AwsDataclass):
+                 dc_cls = c
+
+             fs.append(AwsDataclassMeta.Field(
+                 d_name=d_name,
+                 a_name=a_name,
+                 is_opt=is_opt,
+                 is_seq=is_seq,
+                 dc_cls=dc_cls,
+             ))
+
+         return fs
+
+     #
+
+     class Converters(ta.NamedTuple):
+         d2a: ta.Callable
+         a2d: ta.Callable
+
+     @cached_nullary
+     def converters(self) -> Converters:
+         for df in dc.fields(self.cls):  # type: ignore  # noqa
+             c = df.type
+
+             if is_optional_alias(c):
+                 c = get_optional_alias_arg(c)
+
+             if c is AwsDataclass.Raw:
+                 rf = df.name
+                 break
+
+         else:
+             rf = None
+
+         fs = [
+             (f, f.dc_cls._get_aws_meta().converters() if f.dc_cls is not None else None)  # noqa
+             for f in self.fields()
+         ]
+
+         def d2a(o):
+             dct = {}
+             for f, cs in fs:
+                 x = getattr(o, f.d_name)
+                 if x is None:
+                     continue
+                 if cs is not None:
+                     if f.is_seq:
+                         x = list(map(cs.d2a, x))
+                     else:
+                         x = cs.d2a(x)
+                 dct[f.a_name] = x
+             return dct
+
+         def a2d(v):
+             dct = {}
+             for f, cs in fs:
+                 x = v.get(f.a_name)
+                 if x is None:
+                     continue
+                 if cs is not None:
+                     if f.is_seq:
+                         x = list(map(cs.a2d, x))
+                     else:
+                         x = cs.a2d(x)
+                 dct[f.d_name] = x
+             if rf is not None:
+                 dct[rf] = self.cls.Raw(v)
+             return self.cls(**dct)
+
+         return AwsDataclassMeta.Converters(d2a, a2d)
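
The point of this new file (presumably ominfra/clouds/aws/dataclasses.py, per the RECORD changes below) is the snake_case-to-lowerCamelCase field mapping. A small sketch of the intended round trip (the LogStream type here is hypothetical, and the exact camel_case behavior is assumed):

    import dataclasses as dc
    import typing as ta

    from ominfra.clouds.aws.dataclasses import AwsDataclass

    @dc.dataclass(frozen=True)
    class LogStream(AwsDataclass):  # hypothetical example type
        log_stream_name: str
        first_event_timestamp: ta.Optional[int] = None

    ls = LogStream('my-stream', 123)
    assert ls.to_aws() == {'logStreamName': 'my-stream', 'firstEventTimestamp': 123}
    assert LogStream.from_aws({'logStreamName': 'my-stream'}) == LogStream('my-stream')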
File without changes
@@ -0,0 +1,67 @@
+ # ruff: noqa: UP006 UP007
+ # @omlish-lite
+ import dataclasses as dc
+ import json
+ import typing as ta
+
+ from omlish.lite.check import check_isinstance
+ from omlish.lite.io import DelimitingBuffer
+ from omlish.lite.logs import log
+
+
+ @dc.dataclass(frozen=True)
+ class JournalctlMessage:
+     raw: bytes
+     dct: ta.Optional[ta.Mapping[str, ta.Any]] = None
+     cursor: ta.Optional[str] = None
+     ts_us: ta.Optional[int] = None  # microseconds UTC
+
+
+ class JournalctlMessageBuilder:
+     def __init__(self) -> None:
+         super().__init__()
+
+         self._buf = DelimitingBuffer(b'\n')
+
+     _cursor_field = '__CURSOR'
+     _timestamp_field = '_SOURCE_REALTIME_TIMESTAMP'
+
+     def _make_message(self, raw: bytes) -> JournalctlMessage:
+         dct = None
+         cursor = None
+         ts = None
+
+         try:
+             dct = json.loads(raw.decode('utf-8', 'replace'))
+         except Exception:  # noqa
+             log.exception('Failed to parse raw message: %r', raw)
+
+         else:
+             cursor = dct.get(self._cursor_field)
+
+             if tsv := dct.get(self._timestamp_field):
+                 if isinstance(tsv, str):
+                     try:
+                         ts = int(tsv)
+                     except ValueError:
+                         try:
+                             ts = int(float(tsv))
+                         except ValueError:
+                             log.exception('Failed to parse timestamp: %r', tsv)
+                 elif isinstance(tsv, (int, float)):
+                     ts = int(tsv)
+                 else:
+                     log.exception('Invalid timestamp: %r', tsv)
+
+         return JournalctlMessage(
+             raw=raw,
+             dct=dct,
+             cursor=cursor,
+             ts_us=ts,
+         )
+
+     def feed(self, data: bytes) -> ta.Sequence[JournalctlMessage]:
+         ret: ta.List[JournalctlMessage] = []
+         for line in self._buf.feed(data):
+             ret.append(self._make_message(check_isinstance(line, bytes)))  # type: ignore
+         return ret
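
A minimal sketch of the intended input for this builder (assumed to be `journalctl -o json` output, one JSON object per line; the field values below are made up, and DelimitingBuffer is assumed to yield one complete chunk per delimited line):

    from ominfra.clouds.aws.journald2aws.journald import JournalctlMessageBuilder

    jmb = JournalctlMessageBuilder()
    [msg] = jmb.feed(b'{"__CURSOR": "s=abc;i=1", "_SOURCE_REALTIME_TIMESTAMP": "1700000000000000", "MESSAGE": "hi"}\n')
    assert msg.cursor == 's=abc;i=1'
    assert msg.ts_us == 1700000000000000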
@@ -0,0 +1,173 @@
+ # @omlish-lite
+ # ruff: noqa: UP007
+ """
+ https://docs.aws.amazon.com/AmazonCloudWatchLogs/latest/APIReference/API_PutLogEvents.html :
+  - The maximum batch size is 1,048,576 bytes. This size is calculated as the sum of all event messages in UTF-8, plus 26
+    bytes for each log event.
+  - None of the log events in the batch can be more than 2 hours in the future.
+  - None of the log events in the batch can be more than 14 days in the past. Also, none of the log events can be from
+    earlier than the retention period of the log group.
+  - The log events in the batch must be in chronological order by their timestamp. The timestamp is the time that the
+    event occurred, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC. (In AWS Tools for PowerShell
+    and the AWS SDK for .NET, the timestamp is specified in .NET format: yyyy-mm-ddThh:mm:ss. For example,
+    2017-09-15T13:45:30.)
+  - A batch of log events in a single request cannot span more than 24 hours. Otherwise, the operation fails.
+  - Each log event can be no larger than 256 KB.
+  - The maximum number of log events in a batch is 10,000.
+ """
+ import dataclasses as dc
+ import json
+ import typing as ta
+
+ from omlish.lite.check import check_non_empty_str
+ from omlish.lite.check import check_single
+
+ from .auth import AwsSigner
+ from .auth import V4AwsSigner
+ from .dataclasses import AwsDataclass
+
+
+ ##
+
+
+ @dc.dataclass(frozen=True)
+ class AwsLogEvent(AwsDataclass):
+     message: str
+     timestamp: int  # milliseconds UTC
+
+
+ @dc.dataclass(frozen=True)
+ class AwsPutLogEventsRequest(AwsDataclass):
+     log_group_name: str
+     log_stream_name: str
+     log_events: ta.Sequence[AwsLogEvent]
+     sequence_token: ta.Optional[str] = None
+
+
+ @dc.dataclass(frozen=True)
+ class AwsRejectedLogEventsInfo(AwsDataclass):
+     expired_log_event_end_index: ta.Optional[int] = None
+     too_new_log_event_start_index: ta.Optional[int] = None
+     too_old_log_event_end_index: ta.Optional[int] = None
+
+
+ @dc.dataclass(frozen=True)
+ class AwsPutLogEventsResponse(AwsDataclass):
+     next_sequence_token: ta.Optional[str] = None
+     rejected_log_events_info: ta.Optional[AwsRejectedLogEventsInfo] = None
+
+     raw: ta.Optional[AwsDataclass.Raw] = None
+
+
+ ##
+
+
+ class AwsLogMessagePoster:
+     """
+     TODO:
+      - max_items
+      - max_bytes - manually build body
+      - flush_interval
+      - !! sort by timestamp
+     """
+
+     DEFAULT_URL = 'https://logs.{region_name}.amazonaws.com/'  # noqa
+
+     DEFAULT_SERVICE_NAME = 'logs'
+
+     DEFAULT_TARGET = 'Logs_20140328.PutLogEvents'
+     DEFAULT_CONTENT_TYPE = 'application/x-amz-json-1.1'
+
+     DEFAULT_HEADERS: ta.Mapping[str, str] = {
+         'X-Amz-Target': DEFAULT_TARGET,
+         'Content-Type': DEFAULT_CONTENT_TYPE,
+     }
+
+     def __init__(
+             self,
+             log_group_name: str,
+             log_stream_name: str,
+             region_name: str,
+             credentials: AwsSigner.Credentials,
+
+             url: ta.Optional[str] = None,
+             service_name: str = DEFAULT_SERVICE_NAME,
+             headers: ta.Optional[ta.Mapping[str, str]] = None,
+             extra_headers: ta.Optional[ta.Mapping[str, str]] = None,
+     ) -> None:
+         super().__init__()
+
+         self._log_group_name = check_non_empty_str(log_group_name)
+         self._log_stream_name = check_non_empty_str(log_stream_name)
+
+         if url is None:
+             url = self.DEFAULT_URL.format(region_name=region_name)
+         self._url = url
+
+         if headers is None:
+             headers = self.DEFAULT_HEADERS
+         if extra_headers is not None:
+             headers = {**headers, **extra_headers}
+         self._headers = {k: [v] for k, v in headers.items()}
+
+         self._signer = V4AwsSigner(
+             credentials,
+             region_name,
+             service_name,
+         )
+
+     #
+
+     @dc.dataclass(frozen=True)
+     class Message:
+         message: str
+         ts_ms: int  # milliseconds UTC
+
+     @dc.dataclass(frozen=True)
+     class Post:
+         url: str
+         headers: ta.Mapping[str, str]
+         data: bytes
+
+     def feed(self, messages: ta.Sequence[Message]) -> ta.Sequence[Post]:
+         if not messages:
+             return []
+
+         payload = AwsPutLogEventsRequest(
+             log_group_name=self._log_group_name,
+             log_stream_name=self._log_stream_name,
+             log_events=[
+                 AwsLogEvent(
+                     message=m.message,
+                     timestamp=m.ts_ms,
+                 )
+                 for m in messages
+             ],
+         )
+
+         body = json.dumps(
+             payload.to_aws(),
+             indent=None,
+             separators=(',', ':'),
+         ).encode('utf-8')
+
+         sig_req = V4AwsSigner.Request(
+             method='POST',
+             url=self._url,
+             headers=self._headers,
+             payload=body,
+         )
+
+         sig_headers = self._signer.sign(
+             sig_req,
+             sign_payload=False,
+         )
+         sig_req = dc.replace(sig_req, headers={**sig_req.headers, **sig_headers})
+
+         post = AwsLogMessagePoster.Post(
+             url=self._url,
+             headers={k: check_single(v) for k, v in sig_req.headers.items()},
+             data=sig_req.payload,
+         )
+
+         return [post]
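
Note that the poster only builds signed Post values; it performs no I/O itself. Sending the result with urllib is one option (a sketch; the group, stream, and credential values are placeholders):

    import urllib.request

    from ominfra.clouds.aws.auth import AwsSigner
    from ominfra.clouds.aws.logs import AwsLogMessagePoster

    poster = AwsLogMessagePoster(
        log_group_name='my-group',
        log_stream_name='my-stream',
        region_name='us-east-1',
        credentials=AwsSigner.Credentials('AKIA...', '...'),  # placeholders
    )

    [post] = poster.feed([AwsLogMessagePoster.Message(message='hello', ts_ms=1700000000000)])

    with urllib.request.urlopen(urllib.request.Request(
            post.url,
            method='POST',
            headers=dict(post.headers),
            data=post.data,
    )) as resp:
        print(resp.read())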
@@ -168,6 +168,23 @@ def check_state(v: bool, msg: str = 'Illegal state') -> None:
          raise ValueError(msg)
 
 
+ def check_equal(l: T, r: T) -> T:
+     if l != r:
+         raise ValueError(l, r)
+     return l
+
+
+ def check_not_equal(l: T, r: T) -> T:
+     if l == r:
+         raise ValueError(l, r)
+     return l
+
+
+ def check_single(vs: ta.Iterable[T]) -> T:
+     [v] = vs
+     return v
+
+
  ########################################
  # ../../../../omlish/lite/json.py
 
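
For reference, the semantics of the new helpers (this block repeats below because it is amalgamated into several standalone scripts): each returns its (first) argument on success and raises on mismatch, so they compose inline:

    assert check_equal('GET', 'GET') == 'GET'  # ValueError if the two differ
    assert check_not_equal('a', 'b') == 'a'    # ValueError if the two are equal
    assert check_single(['x']) == 'x'          # unpacking error unless exactly one item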
@@ -251,6 +251,23 @@ def check_state(v: bool, msg: str = 'Illegal state') -> None:
          raise ValueError(msg)
 
 
+ def check_equal(l: T, r: T) -> T:
+     if l != r:
+         raise ValueError(l, r)
+     return l
+
+
+ def check_not_equal(l: T, r: T) -> T:
+     if l == r:
+         raise ValueError(l, r)
+     return l
+
+
+ def check_single(vs: ta.Iterable[T]) -> T:
+     [v] = vs
+     return v
+
+
  ########################################
  # ../../../omlish/lite/json.py
 
File without changes
@@ -2,7 +2,7 @@
  # noinspection DuplicatedCode
  # @omlish-lite
  # @omlish-script
- # @omlish-amalg-output supervisor.py
+ # @omlish-amalg-output ../supervisor/supervisor.py
  # ruff: noqa: N802 UP006 UP007 UP036
  import abc
  import contextlib
@@ -717,6 +717,23 @@ def check_state(v: bool, msg: str = 'Illegal state') -> None:
          raise ValueError(msg)
 
 
+ def check_equal(l: T, r: T) -> T:
+     if l != r:
+         raise ValueError(l, r)
+     return l
+
+
+ def check_not_equal(l: T, r: T) -> T:
+     if l == r:
+         raise ValueError(l, r)
+     return l
+
+
+ def check_single(vs: ta.Iterable[T]) -> T:
+     [v] = vs
+     return v
+
+
  ########################################
  # ../../../omlish/lite/json.py
 
@@ -1992,12 +2009,6 @@ class OutputDispatcher(Dispatcher):
      - route the output to the appropriate log handlers as specified in the config.
      """
 
-     child_log = None  # the current logger (normal_log or capture_log)
-     normal_log = None  # the "normal" (non-capture) logger
-     capture_log = None  # the logger used while we're in capture_mode
-     capture_mode = False  # are we capturing process event data
-     output_buffer = b''  # data waiting to be logged
-
      def __init__(self, process: AbstractSubprocess, event_type, fd):
          """
          Initialize the dispatcher.
@@ -2012,7 +2023,10 @@ class OutputDispatcher(Dispatcher):
          self._init_normal_log()
          self._init_capture_log()
 
-         self.child_log = self.normal_log
+         self._child_log = self._normal_log
+
+         self._capture_mode = False  # are we capturing process event data
+         self._output_buffer = b''  # data waiting to be logged
 
          # all code below is purely for minor speedups
          begin_token = self.event_type.BEGIN_TOKEN
@@ -2025,6 +2039,10 @@ class OutputDispatcher(Dispatcher):
          self.stdout_events_enabled = config.stdout.events_enabled
          self.stderr_events_enabled = config.stderr.events_enabled
 
+     _child_log: ta.Optional[logging.Logger]  # the current logger (normal_log or capture_log)
+     _normal_log: ta.Optional[logging.Logger]  # the "normal" (non-capture) logger
+     _capture_log: ta.Optional[logging.Logger]  # the logger used while we're in capture_mode
+
      def _init_normal_log(self) -> None:
          """
          Configure the "normal" (non-capture) log for this channel of this process. Sets self.normal_log if logging is
@@ -2039,7 +2057,7 @@ class OutputDispatcher(Dispatcher):
          to_syslog = self.lc.syslog
 
          if logfile or to_syslog:
-             self.normal_log = logging.getLogger(__name__)
+             self._normal_log = logging.getLogger(__name__)
 
          # if logfile:
          #     loggers.handle_file(
@@ -2057,29 +2075,29 @@ class OutputDispatcher(Dispatcher):
          #         fmt=config.name + ' %(message)s',
          #     )
 
-     def _init_capture_log(self):
+     def _init_capture_log(self) -> None:
          """
          Configure the capture log for this process. This log is used to temporarily capture output when special output
          is detected. Sets self.capture_log if capturing is enabled.
          """
          capture_maxbytes = self.lc.capture_maxbytes
          if capture_maxbytes:
-             self.capture_log = logging.getLogger(__name__)
+             self._capture_log = logging.getLogger(__name__)
              # loggers.handle_boundIO(
-             #     self.capture_log,
+             #     self._capture_log,
              #     fmt='%(message)s',
              #     maxbytes=capture_maxbytes,
              # )
 
      def remove_logs(self):
-         for log in (self.normal_log, self.capture_log):
+         for log in (self._normal_log, self._capture_log):
              if log is not None:
                  for handler in log.handlers:
                      handler.remove()  # type: ignore
                      handler.reopen()  # type: ignore
 
      def reopen_logs(self):
-         for log in (self.normal_log, self.capture_log):
+         for log in (self._normal_log, self._capture_log):
              if log is not None:
                  for handler in log.handlers:
                      handler.reopen()  # type: ignore
@@ -2088,8 +2106,8 @@ class OutputDispatcher(Dispatcher):
          if data:
              if self._process.context.config.strip_ansi:
                  data = strip_escapes(data)
-             if self.child_log:
-                 self.child_log.info(data)  # type: ignore
+             if self._child_log:
+                 self._child_log.info(data)
              if self.log_to_main_log:
                  if not isinstance(data, bytes):
                      text = data
@@ -2106,23 +2124,23 @@ class OutputDispatcher(Dispatcher):
              notify_event(ProcessLogStderrEvent(self._process, self._process.pid, data))
 
      def record_output(self):
-         if self.capture_log is None:
+         if self._capture_log is None:
              # shortcut trying to find capture data
-             data = self.output_buffer
-             self.output_buffer = b''
+             data = self._output_buffer
+             self._output_buffer = b''
              self._log(data)
              return
 
-         if self.capture_mode:
+         if self._capture_mode:
              token, tokenlen = self.end_token_data
          else:
              token, tokenlen = self.begin_token_data
 
-         if len(self.output_buffer) <= tokenlen:
+         if len(self._output_buffer) <= tokenlen:
              return  # not enough data
 
-         data = self.output_buffer
-         self.output_buffer = b''
+         data = self._output_buffer
+         self._output_buffer = b''
 
          try:
              before, after = data.split(token, 1)
@@ -2130,37 +2148,37 @@ class OutputDispatcher(Dispatcher):
              after = None
              index = find_prefix_at_end(data, token)
              if index:
-                 self.output_buffer = self.output_buffer + data[-index:]
+                 self._output_buffer = self._output_buffer + data[-index:]
                  data = data[:-index]
              self._log(data)
          else:
              self._log(before)
              self.toggle_capture_mode()
-             self.output_buffer = after  # type: ignore
+             self._output_buffer = after  # type: ignore
 
          if after:
              self.record_output()
 
      def toggle_capture_mode(self):
-         self.capture_mode = not self.capture_mode
+         self._capture_mode = not self._capture_mode
 
-         if self.capture_log is not None:
-             if self.capture_mode:
-                 self.child_log = self.capture_log
+         if self._capture_log is not None:
+             if self._capture_mode:
+                 self._child_log = self._capture_log
              else:
-                 for handler in self.capture_log.handlers:
+                 for handler in self._capture_log.handlers:
                      handler.flush()
-                 data = self.capture_log.getvalue()  # type: ignore
+                 data = self._capture_log.getvalue()  # type: ignore
                  channel = self._channel
                  procname = self._process.config.name
                  event = self.event_type(self._process, self._process.pid, data)
                  notify_event(event)
 
                  log.debug('%r %s emitted a comm event', procname, channel)
-                 for handler in self.capture_log.handlers:
+                 for handler in self._capture_log.handlers:
                      handler.remove()  # type: ignore
                      handler.reopen()  # type: ignore
-                 self.child_log = self.normal_log
+                 self._child_log = self._normal_log
 
      def writable(self) -> bool:
          return False
@@ -2172,7 +2190,7 @@ class OutputDispatcher(Dispatcher):
 
      def handle_read_event(self) -> None:
          data = readfd(self._fd)
-         self.output_buffer += data
+         self._output_buffer += data
          self.record_output()
          if not data:
              # if we get no data back from the pipe, it means that the child process has ended. See
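
The renames above are more than cosmetic: the old class-level defaults (`output_buffer = b''` and friends) acted as state shared through the class until first assignment, while the new code initializes per-instance state in `__init__` and leaves only type annotations at class level. An illustration of the difference (not from the diff):

    class Old:
        buf = b''  # class attribute: instances read the shared class default
                   # until the first write creates an instance attribute

    class New:
        buf: bytes  # annotation only, no class-level value

        def __init__(self) -> None:
            self.buf = b''  # per-instance state, checkable by type checkers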
@@ -1,7 +1,9 @@
+ # ruff: noqa: UP007
  import abc
  import errno
  import logging
  import os
+ import typing as ta
 
  from .compat import as_bytes
  from .compat import compact_traceback
@@ -85,12 +87,6 @@ class OutputDispatcher(Dispatcher):
      - route the output to the appropriate log handlers as specified in the config.
      """
 
-     child_log = None  # the current logger (normal_log or capture_log)
-     normal_log = None  # the "normal" (non-capture) logger
-     capture_log = None  # the logger used while we're in capture_mode
-     capture_mode = False  # are we capturing process event data
-     output_buffer = b''  # data waiting to be logged
-
      def __init__(self, process: AbstractSubprocess, event_type, fd):
          """
          Initialize the dispatcher.
@@ -105,7 +101,10 @@ class OutputDispatcher(Dispatcher):
          self._init_normal_log()
          self._init_capture_log()
 
-         self.child_log = self.normal_log
+         self._child_log = self._normal_log
+
+         self._capture_mode = False  # are we capturing process event data
+         self._output_buffer = b''  # data waiting to be logged
 
          # all code below is purely for minor speedups
          begin_token = self.event_type.BEGIN_TOKEN
@@ -118,6 +117,10 @@ class OutputDispatcher(Dispatcher):
          self.stdout_events_enabled = config.stdout.events_enabled
          self.stderr_events_enabled = config.stderr.events_enabled
 
+     _child_log: ta.Optional[logging.Logger]  # the current logger (normal_log or capture_log)
+     _normal_log: ta.Optional[logging.Logger]  # the "normal" (non-capture) logger
+     _capture_log: ta.Optional[logging.Logger]  # the logger used while we're in capture_mode
+
      def _init_normal_log(self) -> None:
          """
          Configure the "normal" (non-capture) log for this channel of this process. Sets self.normal_log if logging is
@@ -132,7 +135,7 @@ class OutputDispatcher(Dispatcher):
          to_syslog = self.lc.syslog
 
          if logfile or to_syslog:
-             self.normal_log = logging.getLogger(__name__)
+             self._normal_log = logging.getLogger(__name__)
 
          # if logfile:
          #     loggers.handle_file(
@@ -150,29 +153,29 @@ class OutputDispatcher(Dispatcher):
          #         fmt=config.name + ' %(message)s',
          #     )
 
-     def _init_capture_log(self):
+     def _init_capture_log(self) -> None:
          """
          Configure the capture log for this process. This log is used to temporarily capture output when special output
          is detected. Sets self.capture_log if capturing is enabled.
          """
          capture_maxbytes = self.lc.capture_maxbytes
          if capture_maxbytes:
-             self.capture_log = logging.getLogger(__name__)
+             self._capture_log = logging.getLogger(__name__)
              # loggers.handle_boundIO(
-             #     self.capture_log,
+             #     self._capture_log,
              #     fmt='%(message)s',
              #     maxbytes=capture_maxbytes,
              # )
 
      def remove_logs(self):
-         for log in (self.normal_log, self.capture_log):
+         for log in (self._normal_log, self._capture_log):
              if log is not None:
                  for handler in log.handlers:
                      handler.remove()  # type: ignore
                      handler.reopen()  # type: ignore
 
      def reopen_logs(self):
-         for log in (self.normal_log, self.capture_log):
+         for log in (self._normal_log, self._capture_log):
              if log is not None:
                  for handler in log.handlers:
                      handler.reopen()  # type: ignore
@@ -181,8 +184,8 @@ class OutputDispatcher(Dispatcher):
          if data:
              if self._process.context.config.strip_ansi:
                  data = strip_escapes(data)
-             if self.child_log:
-                 self.child_log.info(data)  # type: ignore
+             if self._child_log:
+                 self._child_log.info(data)
              if self.log_to_main_log:
                  if not isinstance(data, bytes):
                      text = data
@@ -199,23 +202,23 @@ class OutputDispatcher(Dispatcher):
              notify_event(ProcessLogStderrEvent(self._process, self._process.pid, data))
 
      def record_output(self):
-         if self.capture_log is None:
+         if self._capture_log is None:
              # shortcut trying to find capture data
-             data = self.output_buffer
-             self.output_buffer = b''
+             data = self._output_buffer
+             self._output_buffer = b''
              self._log(data)
              return
 
-         if self.capture_mode:
+         if self._capture_mode:
              token, tokenlen = self.end_token_data
          else:
             token, tokenlen = self.begin_token_data
 
-         if len(self.output_buffer) <= tokenlen:
+         if len(self._output_buffer) <= tokenlen:
              return  # not enough data
 
-         data = self.output_buffer
-         self.output_buffer = b''
+         data = self._output_buffer
+         self._output_buffer = b''
 
          try:
              before, after = data.split(token, 1)
@@ -223,37 +226,37 @@ class OutputDispatcher(Dispatcher):
              after = None
              index = find_prefix_at_end(data, token)
              if index:
-                 self.output_buffer = self.output_buffer + data[-index:]
+                 self._output_buffer = self._output_buffer + data[-index:]
                  data = data[:-index]
              self._log(data)
          else:
              self._log(before)
              self.toggle_capture_mode()
-             self.output_buffer = after  # type: ignore
+             self._output_buffer = after  # type: ignore
 
          if after:
              self.record_output()
 
      def toggle_capture_mode(self):
-         self.capture_mode = not self.capture_mode
+         self._capture_mode = not self._capture_mode
 
-         if self.capture_log is not None:
-             if self.capture_mode:
-                 self.child_log = self.capture_log
+         if self._capture_log is not None:
+             if self._capture_mode:
+                 self._child_log = self._capture_log
              else:
-                 for handler in self.capture_log.handlers:
+                 for handler in self._capture_log.handlers:
                      handler.flush()
-                 data = self.capture_log.getvalue()  # type: ignore
+                 data = self._capture_log.getvalue()  # type: ignore
                  channel = self._channel
                  procname = self._process.config.name
                  event = self.event_type(self._process, self._process.pid, data)
                  notify_event(event)
 
                  log.debug('%r %s emitted a comm event', procname, channel)
-                 for handler in self.capture_log.handlers:
+                 for handler in self._capture_log.handlers:
                      handler.remove()  # type: ignore
                      handler.reopen()  # type: ignore
-                 self.child_log = self.normal_log
+                 self._child_log = self._normal_log
 
      def writable(self) -> bool:
          return False
@@ -265,7 +268,7 @@ class OutputDispatcher(Dispatcher):
 
      def handle_read_event(self) -> None:
          data = readfd(self._fd)
-         self.output_buffer += data
+         self._output_buffer += data
          self.record_output()
          if not data:
              # if we get no data back from the pipe, it means that the child process has ended. See
@@ -1,6 +1,6 @@
  #!/usr/bin/env python3
  # ruff: noqa: UP006 UP007
- # @omlish-amalg _supervisor.py
+ # @omlish-amalg ../scripts/supervisor.py
  import logging
  import signal
  import time
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: ominfra
- Version: 0.0.0.dev77
+ Version: 0.0.0.dev79
  Summary: ominfra
  Author: wrmsr
  License: BSD-3-Clause
@@ -12,8 +12,8 @@ Classifier: Operating System :: OS Independent
  Classifier: Operating System :: POSIX
  Requires-Python: ~=3.12
  License-File: LICENSE
- Requires-Dist: omdev ==0.0.0.dev77
- Requires-Dist: omlish ==0.0.0.dev77
+ Requires-Dist: omdev ==0.0.0.dev79
+ Requires-Dist: omlish ==0.0.0.dev79
  Provides-Extra: all
  Requires-Dist: paramiko ~=3.5 ; extra == 'all'
  Requires-Dist: asyncssh ~=2.17 ; (python_version < "3.13") and extra == 'all'
@@ -6,11 +6,15 @@ ominfra/ssh.py,sha256=jQpc4WvkMckIfk4vILda8zFaeharRqc_6wxW50b0OjQ,5431
  ominfra/clouds/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  ominfra/clouds/aws/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  ominfra/clouds/aws/__main__.py,sha256=HXMoxEl9KHhv6zOOPQxiJAftfR2SjBqeVTYw-og9aFw,163
- ominfra/clouds/aws/auth.py,sha256=EW3lK1U0hnjXkTn1KWJeuv9GG0ibbKdvgLD0P6HJtwo,5502
+ ominfra/clouds/aws/auth.py,sha256=2X6S9ARNx3RK3KZSAJe7XunvlN0tBKEcySDvZaJA8fE,6185
  ominfra/clouds/aws/cli.py,sha256=OJVVLIwSy1378drkgP1ke_JltbyzBmnrB_Lom6A83os,510
+ ominfra/clouds/aws/dataclasses.py,sha256=rKhtJKJ0JhMssU9n9CABX_JaUiokIboEATJ9TZgZQ6A,3868
+ ominfra/clouds/aws/logs.py,sha256=VIT1fQazuaVSOfIlIcA9IxGT0Bqb69dbqcnbaTMEvSE,5185
  ominfra/clouds/aws/metadata.py,sha256=XR1BuMdQheyeFjjA3MN8GCNWVAp5ahoPdbWXEmViutQ,2767
+ ominfra/clouds/aws/journald2aws/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ ominfra/clouds/aws/journald2aws/journald.py,sha256=4Yv7GZA-1wloufTNPud5BUYGS5njVokAdb5onFPHza4,1990
  ominfra/deploy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- ominfra/deploy/_executor.py,sha256=Xoib_itZzHzAQJ-AuJ2VVmGW5TG20DxUDaeDRbfs5cQ,32569
+ ominfra/deploy/_executor.py,sha256=zHn4zAz6Ch1i5R_EdKTfJv_4SE0QPNuQEk7O1ptB_7A,32834
  ominfra/deploy/configs.py,sha256=qi0kwT7G2NH7dXLOQic-u6R3yeadup_QtvrjwWIggbM,435
  ominfra/deploy/remote.py,sha256=6ACmpXU1uBdyGs3Xsp97ktKFq30cJlzN9LRWNUWlGY4,2144
  ominfra/deploy/executor/__init__.py,sha256=Y3l4WY4JRi2uLG6kgbGp93fuGfkxkKwZDvhsa0Rwgtk,15
@@ -39,31 +43,32 @@ ominfra/deploy/poly/venv.py,sha256=BoipDEa4NTeodjf3L57KJfq9eGKLagFNKwD8pS4yrzA,1
  ominfra/manage/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  ominfra/manage/manage.py,sha256=BttL8LFEknHZE_h2Pt5dAqbfUkv6qy43WI0raXBZ1a8,151
  ominfra/pyremote/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- ominfra/pyremote/_runcommands.py,sha256=nPtvx_oFHIViYb7V0ius23TF_2_SmEfDzdxa1_5LISc,26405
+ ominfra/pyremote/_runcommands.py,sha256=2UVHaUJjmWP8jMZE79j2Qk1E5IOyVV1qNSUbdr-zKYM,26670
  ominfra/pyremote/bootstrap.py,sha256=RvMO3YGaN1E4sgUi1JEtiPak8cjvqtc_vRCq1yqbeZg,3370
  ominfra/pyremote/runcommands.py,sha256=bviS0_TDIoZVAe4h-_iavbvJtVSFu8lnk7fQ5iasCWE,1571
+ ominfra/scripts/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ ominfra/scripts/supervisor.py,sha256=eVD0NeSsnv-ckzb3c-EgzDRq7Kyr2dpvpPquW3Y690w,105707
  ominfra/supervisor/__init__.py,sha256=Y3l4WY4JRi2uLG6kgbGp93fuGfkxkKwZDvhsa0Rwgtk,15
  ominfra/supervisor/__main__.py,sha256=usW9jjq5JPe_2SL8H5PrjDdksO75MX85Ir0HFfb35eM,72
- ominfra/supervisor/_supervisor.py,sha256=ONztv2soL9y73S9Ax8zyG_-0KvEcHzFIbZb73XFkCao,105313
  ominfra/supervisor/compat.py,sha256=sqsvlCNF2iMFdrc0LuTfyCBxXSVACtQx2wCfeHwWvAQ,5044
  ominfra/supervisor/configs.py,sha256=FjgsFijC_ivqJkLua4ZV0UWjDxP3JeDua3aVy4_CnbM,2970
  ominfra/supervisor/context.py,sha256=xh03VN8e4hHj5udjtgUvYnqUticTzCCXdIA0Xp4Ba2c,15335
  ominfra/supervisor/datatypes.py,sha256=cq2p7wnLN0nvKT-jZxaBByqsnCIUz6pX9dPtm69h18Q,4428
- ominfra/supervisor/dispatchers.py,sha256=ye-gPdZ4RnOD2pE0mt2buEwtYdRR1vNa1xvCAMDwsTw,10212
+ ominfra/supervisor/dispatchers.py,sha256=4AVtJagOEalRnxwCZjNr8LpkCkykzzg2ncMZiYsC4Bc,10367
  ominfra/supervisor/events.py,sha256=wT-gPfvv2HCAyQXMq3jiek17Jq6kAZb0U2hekzjf3ks,7743
  ominfra/supervisor/exceptions.py,sha256=jq8Md--zmAHri1BB2XeDPFcTur81IRwArOcZoP7-6W0,746
  ominfra/supervisor/poller.py,sha256=oqNEA7i2XXtERBv552sr29a6mlogmosWjeGOZSul5Kg,7273
  ominfra/supervisor/process.py,sha256=phucIv2a-LHXypY3kJ9fCsKEki9G9XdiJtXCVMgGcZI,31291
  ominfra/supervisor/states.py,sha256=JMxXYTZhJkMNQZ2tTV6wId7wrvnWgiZteskACprKskM,1374
- ominfra/supervisor/supervisor.py,sha256=VAClZWVrZzZ6P0i6TIEKcyzI7WwWl5LRbnXngA4HPH4,13801
+ ominfra/supervisor/supervisor.py,sha256=GnG8SAhJ5UUay7cM3oBJYb4TmIUaQUdWWqnhANpalPw,13811
  ominfra/supervisor/types.py,sha256=ec62QG0CDJc0XNxCnf3lXxhsxrr4CCScLPI-1SpQjlc,1141
  ominfra/tailscale/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  ominfra/tailscale/cli.py,sha256=Ltg6RVFsMLLPjLzoGwM6sxjmwjEVEYHAdrqmCc4N1HM,3174
  ominfra/tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  ominfra/tools/listresources.py,sha256=L4t5rszm9ulcdWyr7n48_R9d5Etg4S2a4WQhlbHDtnQ,6106
- ominfra-0.0.0.dev77.dist-info/LICENSE,sha256=B_hVtavaA8zCYDW99DYdcpDLKz1n3BBRjZrcbv8uG8c,1451
- ominfra-0.0.0.dev77.dist-info/METADATA,sha256=F3x-psxct-590MisOe_YAKVshGjbDwiVnPHTs8CVYpw,799
- ominfra-0.0.0.dev77.dist-info/WHEEL,sha256=OVMc5UfuAQiSplgO0_WdW7vXVGAt9Hdd6qtN4HotdyA,91
- ominfra-0.0.0.dev77.dist-info/entry_points.txt,sha256=kgecQ2MgGrM9qK744BoKS3tMesaC3yjLnl9pa5CRczg,37
- ominfra-0.0.0.dev77.dist-info/top_level.txt,sha256=E-b2OHkk_AOBLXHYZQ2EOFKl-_6uOGd8EjeG-Zy6h_w,8
- ominfra-0.0.0.dev77.dist-info/RECORD,,
+ ominfra-0.0.0.dev79.dist-info/LICENSE,sha256=B_hVtavaA8zCYDW99DYdcpDLKz1n3BBRjZrcbv8uG8c,1451
+ ominfra-0.0.0.dev79.dist-info/METADATA,sha256=qHE4z-IH2J8uJL_TOTh54TNP6YYBUHZKzbci30YOnb8,799
+ ominfra-0.0.0.dev79.dist-info/WHEEL,sha256=OVMc5UfuAQiSplgO0_WdW7vXVGAt9Hdd6qtN4HotdyA,91
+ ominfra-0.0.0.dev79.dist-info/entry_points.txt,sha256=kgecQ2MgGrM9qK744BoKS3tMesaC3yjLnl9pa5CRczg,37
+ ominfra-0.0.0.dev79.dist-info/top_level.txt,sha256=E-b2OHkk_AOBLXHYZQ2EOFKl-_6uOGd8EjeG-Zy6h_w,8
+ ominfra-0.0.0.dev79.dist-info/RECORD,,