omlish 0.0.0.dev220__py3-none-any.whl → 0.0.0.dev222__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- omlish/__about__.py +2 -2
- omlish/algorithm/all.py +13 -0
- omlish/algorithm/distribute.py +46 -0
- omlish/algorithm/toposort.py +26 -0
- omlish/algorithm/unify.py +31 -0
- omlish/collections/__init__.py +0 -2
- omlish/collections/utils.py +0 -46
- omlish/graphs/trees.py +2 -1
- omlish/http/coro/server.py +42 -33
- omlish/http/{simple.py → coro/simple.py} +17 -17
- omlish/specs/irc/__init__.py +0 -0
- omlish/specs/irc/format/LICENSE +11 -0
- omlish/specs/irc/format/__init__.py +61 -0
- omlish/specs/irc/format/consts.py +6 -0
- omlish/specs/irc/format/errors.py +30 -0
- omlish/specs/irc/format/message.py +18 -0
- omlish/specs/irc/format/nuh.py +52 -0
- omlish/specs/irc/format/parsing.py +155 -0
- omlish/specs/irc/format/rendering.py +150 -0
- omlish/specs/irc/format/tags.py +99 -0
- omlish/specs/irc/format/utils.py +27 -0
- omlish/specs/irc/numerics/__init__.py +0 -0
- omlish/specs/irc/numerics/formats.py +94 -0
- omlish/specs/irc/numerics/numerics.py +808 -0
- omlish/specs/irc/numerics/types.py +59 -0
- {omlish-0.0.0.dev220.dist-info → omlish-0.0.0.dev222.dist-info}/METADATA +1 -1
- {omlish-0.0.0.dev220.dist-info → omlish-0.0.0.dev222.dist-info}/RECORD +32 -15
- omlish/docker/oci/data.py +0 -71
- omlish/docker/oci/media.py +0 -124
- /omlish/{docker/oci → algorithm}/__init__.py +0 -0
- {omlish-0.0.0.dev220.dist-info → omlish-0.0.0.dev222.dist-info}/LICENSE +0 -0
- {omlish-0.0.0.dev220.dist-info → omlish-0.0.0.dev222.dist-info}/WHEEL +0 -0
- {omlish-0.0.0.dev220.dist-info → omlish-0.0.0.dev222.dist-info}/entry_points.txt +0 -0
- {omlish-0.0.0.dev220.dist-info → omlish-0.0.0.dev222.dist-info}/top_level.txt +0 -0
omlish/__about__.py
CHANGED
omlish/algorithm/all.py
ADDED
omlish/algorithm/distribute.py
ADDED
@@ -0,0 +1,46 @@
+# ruff: noqa: UP006 UP007
+# @omlish-lite
+import collections
+import heapq
+import typing as ta
+
+
+T = ta.TypeVar('T')
+
+
+def distribute_evenly(
+        items: ta.Iterable[ta.Tuple[T, float]],
+        n_bins: int,
+) -> ta.List[ta.List[ta.Tuple[T, float]]]:
+    """
+    Distribute items into n bins as evenly as possible in terms of total size.
+    - Sorting ensures larger items are placed first, preventing large leftover gaps in bins.
+    - A min-heap efficiently finds the least loaded bin in O(log n), keeping the distribution balanced.
+    - Each item is placed in the lightest bin, preventing a few bins from getting overloaded early.
+
+    :param items: List of tuples (item, size).
+    :param n_bins: Number of bins.
+    :return: List of n_bins lists, each containing items assigned to that bin.
+    """
+
+    # Sort items by size in descending order
+    items_sorted = sorted(items, key=lambda x: x[1], reverse=True)
+
+    # Min-heap to track bin loads (size, index)
+    bins = [(0, i) for i in range(n_bins)]  # (current size, bin index)
+    heapq.heapify(bins)
+
+    # Allocate items to bins
+    bin_contents = collections.defaultdict(list)
+
+    for item, size in items_sorted:
+        # Get the least loaded bin
+        bin_size, bin_index = heapq.heappop(bins)
+
+        # Assign item to this bin
+        bin_contents[bin_index].append((item, size))
+
+        # Update bin load and push back to heap
+        heapq.heappush(bins, (bin_size + size, bin_index))  # type: ignore
+
+    return [bin_contents[i] for i in range(n_bins)]
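The docstring above describes a classic greedy longest-processing-time placement. As a quick illustration (not part of the package itself), the helper can be called like this, using the module path introduced in this diff:

from omlish.algorithm.distribute import distribute_evenly

# (item, size) pairs; larger items are placed first, each into the currently lightest bin
files = [('a.bin', 9.0), ('b.bin', 7.0), ('c.bin', 4.0), ('d.bin', 3.0), ('e.bin', 1.0)]

bins = distribute_evenly(files, n_bins=2)
# [[('a.bin', 9.0), ('d.bin', 3.0)], [('b.bin', 7.0), ('c.bin', 4.0), ('e.bin', 1.0)]]
# -> bin totals 12.0 and 12.0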
omlish/algorithm/toposort.py
ADDED
@@ -0,0 +1,26 @@
+# ruff: noqa: UP006 UP007
+# @omlish-lite
+import functools
+import typing as ta
+
+
+T = ta.TypeVar('T')
+
+
+def mut_toposort(data: ta.Dict[T, ta.Set[T]]) -> ta.Iterator[ta.Set[T]]:
+    for k, v in data.items():
+        v.discard(k)
+    extra_items_in_deps = functools.reduce(set.union, data.values()) - set(data.keys())
+    data.update({item: set() for item in extra_items_in_deps})
+    while True:
+        ordered = {item for item, dep in data.items() if not dep}
+        if not ordered:
+            break
+        yield ordered
+        data = {item: (dep - ordered) for item, dep in data.items() if item not in ordered}
+    if data:
+        raise ValueError('Cyclic dependencies exist among these items: ' + ' '.join(repr(x) for x in data.items()))
+
+
+def toposort(data: ta.Mapping[T, ta.AbstractSet[T]]) -> ta.Iterator[ta.Set[T]]:
+    return mut_toposort({k: set(v) for k, v in data.items()})
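For orientation, the relocated toposort yields dependency-ordered batches; a small sketch using the new module path (the example data is made up):

from omlish.algorithm.toposort import toposort

# map each node to the set of nodes it depends on
deps = {
    'app': {'lib', 'cfg'},
    'lib': {'cfg'},
    'cfg': set(),
}

print(list(toposort(deps)))
# [{'cfg'}, {'lib'}, {'app'}] - each batch depends only on earlier batches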
omlish/algorithm/unify.py
ADDED
@@ -0,0 +1,31 @@
+import itertools
+import typing as ta
+
+
+T = ta.TypeVar('T')
+
+
+def mut_unify_sets(sets: ta.Iterable[set[T]]) -> list[set[T]]:
+    rem: list[set[T]] = list(sets)
+    ret: list[set[T]] = []
+    while rem:
+        cur = rem.pop()
+        while True:
+            moved = False
+            for i in range(len(rem) - 1, -1, -1):
+                if any(e in cur for e in rem[i]):
+                    cur.update(rem.pop(i))
+                    moved = True
+            if not moved:
+                break
+        ret.append(cur)
+    if ret:
+        all_ = set(itertools.chain.from_iterable(ret))
+        num = sum(map(len, ret))
+        if len(all_) != num:
+            raise ValueError('Length mismatch')
+    return ret
+
+
+def unify_sets(sets: ta.Iterable[ta.AbstractSet[T]]) -> list[set[T]]:
+    return mut_unify_sets([set(s) for s in sets])
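Similarly, unify_sets merges transitively overlapping sets; a small illustrative run (not from the package docs):

from omlish.algorithm.unify import unify_sets

print(unify_sets([{1, 2}, {2, 3}, {7, 8}, {3, 4}]))
# [{1, 2, 3, 4}, {7, 8}] - overlapping groups merge, disjoint groups stay separate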
omlish/collections/__init__.py
CHANGED
omlish/collections/utils.py
CHANGED
@@ -1,8 +1,5 @@
-import functools
-import itertools
 import typing as ta

-from .. import check
 from .. import lang
 from .exceptions import DuplicateKeyError
 from .identity import IdentityKeyDict
@@ -17,28 +14,6 @@ V = ta.TypeVar('V')
 ##


-def mut_toposort(data: dict[T, set[T]]) -> ta.Iterator[set[T]]:
-    for k, v in data.items():
-        v.discard(k)
-    extra_items_in_deps = functools.reduce(set.union, data.values()) - set(data.keys())
-    data.update({item: set() for item in extra_items_in_deps})
-    while True:
-        ordered = {item for item, dep in data.items() if not dep}
-        if not ordered:
-            break
-        yield ordered
-        data = {item: (dep - ordered) for item, dep in data.items() if item not in ordered}
-    if data:
-        raise ValueError('Cyclic dependencies exist among these items: ' + ' '.join(repr(x) for x in data.items()))
-
-
-def toposort(data: ta.Mapping[T, ta.AbstractSet[T]]) -> ta.Iterator[set[T]]:
-    return mut_toposort({k: set(v) for k, v in data.items()})
-
-
-##
-
-
 class PartitionResult(ta.NamedTuple, ta.Generic[T]):
     t: list[T]
     f: list[T]
@@ -153,24 +128,3 @@ def key_cmp(fn: ta.Callable[[K, K], int]) -> ta.Callable[[tuple[K, V], tuple[K,

 def indexes(it: ta.Iterable[T]) -> dict[T, int]:
     return {e: i for i, e in enumerate(it)}
-
-
-def mut_unify_sets(sets: ta.Iterable[set[T]]) -> list[set[T]]:
-    rem: list[set[T]] = list(sets)
-    ret: list[set[T]] = []
-    while rem:
-        cur = rem.pop()
-        while True:
-            moved = False
-            for i in range(len(rem) - 1, -1, -1):
-                if any(e in cur for e in rem[i]):
-                    cur.update(rem.pop(i))
-                    moved = True
-            if not moved:
-                break
-        ret.append(cur)
-    if ret:
-        all_ = set(itertools.chain.from_iterable(ret))
-        num = sum(map(len, ret))
-        check.equal(len(all_), num)
-    return ret
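For downstream code that reached these helpers through omlish.collections (as graphs/trees.py did), the removal amounts to an import-path move; a rough before/after sketch, assuming no alias is left behind in the old location:

# before (<= dev220), via the collections utils removed above:
#   from omlish import collections as col
#   batches = list(col.mut_toposort(deps))

# after (dev222), via the new algorithm package:
from omlish.algorithm import all as alg

deps = {'b': {'a'}, 'c': {'b'}}
batches = list(alg.mut_toposort(deps))  # [{'a'}, {'b'}, {'c'}]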
omlish/graphs/trees.py
CHANGED
@@ -10,6 +10,7 @@ from .. import cached
 from .. import check
 from .. import collections as col
 from .. import lang
+from ..algorithm import all as alg


 T = ta.TypeVar('T')
@@ -194,7 +195,7 @@ class BasicTreeAnalysis(ta.Generic[NodeT]):
         else:
             e, d = lang.identity, lang.identity
         tsd = {e(n): {e(p)} for n, p in parents_by_node.items()}
-        ts = list(
+        ts = list(alg.mut_toposort(tsd))
         root = d(check.single(ts[0]))

         return cls(
omlish/http/coro/server.py
CHANGED
@@ -421,50 +421,59 @@ class CoroHttpServer:
     #

     def coro_handle(self) -> ta.Generator[Io, ta.Optional[bytes], None]:
-
-        gen = self.coro_handle_one()
+        return self._coro_run_handler(self._coro_handle_one())

-
-
-
-
-
-
-
+    def _coro_run_handler(
+            self,
+            gen: ta.Generator[
+                ta.Union[AnyLogIo, AnyReadIo, _Response],
+                ta.Optional[bytes],
+                None,
+            ],
+    ) -> ta.Generator[Io, ta.Optional[bytes], None]:
+        i: ta.Optional[bytes]
+        o: ta.Any = next(gen)
+        while True:
+            try:
+                if isinstance(o, self.AnyLogIo):
+                    i = None
+                    yield o

-
-
+                elif isinstance(o, self.AnyReadIo):
+                    i = check.isinstance((yield o), bytes)

-
-
+                elif isinstance(o, self._Response):
+                    i = None

-
-
-
+                    r = self._preprocess_response(o)
+                    hb = self._build_response_head_bytes(r)
+                    check.none((yield self.WriteIo(hb)))

-
-
+                    for b in self._yield_response_data(r):
+                        yield self.WriteIo(b)

-
-
+                    o.close()
+                    if o.close_connection:
+                        break
+                    o = None

-
-
+                else:
+                    raise TypeError(o)  # noqa

-
-
-
-
-
-
+                try:
+                    o = gen.send(i)
+                except EOFError:
+                    return
+                except StopIteration:
+                    break

-
-
-
+            except Exception:  # noqa
+                if hasattr(o, 'close'):
+                    o.close()

-
+                raise

-    def
+    def _coro_handle_one(self) -> ta.Generator[
         ta.Union[AnyLogIo, AnyReadIo, _Response],
         ta.Optional[bytes],
         None,
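The net effect of this hunk is that coro_handle now just wraps the pure-logic generator (_coro_handle_one) in a runner (_coro_run_handler) that yields Io objects and receives bytes back. To make the inversion of control concrete, here is a toy driver loop with made-up ReadIo/WriteIo stand-ins; it is not the package's real socket driver, and the actual Io classes and their fields live elsewhere in server.py:

import dataclasses as dc
import typing as ta


@dc.dataclass(frozen=True)
class ReadIo:  # stand-in for the server's read-request IO objects
    sz: int


@dc.dataclass(frozen=True)
class WriteIo:  # stand-in for the server's write-request IO objects
    data: bytes


def drive(coro: ta.Generator[ta.Any, ta.Optional[bytes], None], sock) -> None:
    # Blocking driver: feed bytes in for reads, push bytes out for writes.
    i: ta.Optional[bytes] = None
    while True:
        try:
            o = coro.send(i)  # send(None) on the first pass just starts the generator
        except StopIteration:
            break
        if isinstance(o, ReadIo):
            i = sock.recv(o.sz)
        elif isinstance(o, WriteIo):
            sock.sendall(o.data)
            i = None
        else:
            raise TypeError(o)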
omlish/http/{simple.py → coro/simple.py}
RENAMED
@@ -5,23 +5,23 @@ import contextlib
 import functools
 import typing as ta

-from
-from
-from
-from
-from
-from
-from
-from
-from
-from
-from
-from
-from
-from
-from
-from .
-from .
+from ...lite.check import check
+from ...sockets.addresses import SocketAndAddress
+from ...sockets.bind import CanSocketBinder
+from ...sockets.bind import SocketBinder
+from ...sockets.server.handlers import ExecutorSocketServerHandler
+from ...sockets.server.handlers import SocketHandlerSocketServerHandler
+from ...sockets.server.handlers import SocketServerHandler
+from ...sockets.server.handlers import SocketWrappingSocketServerHandler
+from ...sockets.server.handlers import StandardSocketServerHandler
+from ...sockets.server.server import SocketServer
+from ...sockets.server.threading import ThreadingSocketServerHandler
+from ..handlers import HttpHandler
+from ..parsing import HttpRequestParser
+from ..versions import HttpProtocolVersion
+from ..versions import HttpProtocolVersions
+from .server import CoroHttpServer
+from .server import CoroHttpServerSocketHandler


 if ta.TYPE_CHECKING:
omlish/specs/irc/__init__.py
ADDED
File without changes
omlish/specs/irc/format/LICENSE
ADDED
@@ -0,0 +1,11 @@
+Copyright (c) 2016-2021 Daniel Oaks
+Copyright (c) 2018-2021 Shivaram Lingamneni
+
+Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted,
+provided that the above copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF
+THIS SOFTWARE.
omlish/specs/irc/format/__init__.py
ADDED
@@ -0,0 +1,61 @@
+# Copyright (c) 2016-2021 Daniel Oaks
+# Copyright (c) 2018-2021 Shivaram Lingamneni
+#
+# Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby
+# granted, provided that the above copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN
+# AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+#
+# https://github.com/ergochat/irc-go/blob/9beac2d29dc5f998c5a53e5db7a6426d7d083a79/ircmsg/
+from .consts import (  # noqa
+    MAX_LEN_CLIENT_TAG_DATA,
+    MAX_LEN_SERVER_TAG_DATA,
+    MAX_LEN_TAGS,
+    MAX_LEN_TAGS_FROM_CLIENT,
+    MAX_LEN_TAG_DATA,
+)
+
+from .errors import (  # noqa
+    BadCharactersError,
+    BadParamError,
+    CommandMissingError,
+    Error,
+    InvalidTagContentError,
+    LineEmptyError,
+    MalformedNuhError,
+    TagsTooLongError,
+)
+
+from .message import (  # noqa
+    Message,
+)
+
+from .nuh import (  # noqa
+    Nuh,
+)
+
+from .parsing import (  # noqa
+    ParsedLine,
+    parse_line,
+    parse_line_,
+    parse_line_strict,
+)
+
+from .rendering import (  # noqa
+    RenderedLine,
+    render_line,
+    render_line_,
+    render_line_strict,
+)
+
+from .tags import (  # noqa
+    escape_tag_value,
+    parse_tags,
+    unescape_tag_value,
+    validate_tag_name,
+    validate_tag_value,
+)
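The re-export list above sketches the intended workflow: parse a raw IRC line into a message object, then render it back to wire format. parsing.py and rendering.py are not shown in this excerpt, so the signatures below are assumptions for illustration only, not documented API:

from omlish.specs.irc import format as ircfmt

raw = '@time=2024-01-01T00:00:00Z :nick!user@host PRIVMSG #chan :hello'

msg = ircfmt.parse_line(raw)   # assumed: takes the raw line string
out = ircfmt.render_line(msg)  # assumed: takes the parsed message object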
omlish/specs/irc/format/errors.py
ADDED
@@ -0,0 +1,30 @@
+class Error(Exception):
+    pass
+
+
+class LineEmptyError(Error):
+    pass
+
+
+class BadCharactersError(Error):
+    pass
+
+
+class TagsTooLongError(Error):
+    pass
+
+
+class InvalidTagContentError(Error):
+    pass
+
+
+class CommandMissingError(Error):
+    pass
+
+
+class BadParamError(Error):
+    pass
+
+
+class MalformedNuhError(Error):
+    pass
omlish/specs/irc/format/message.py
ADDED
@@ -0,0 +1,18 @@
+"""
+TODO:
+ - https://github.com/ergochat/irctest/blob/master/irctest/irc_utils/message_parser.py ?
+"""
+import dataclasses as dc
+import typing as ta
+
+
+@dc.dataclass(frozen=True)
+class Message:
+    source: str | None
+    command: str
+    params: ta.Sequence[str]
+
+    force_trailing: bool = False
+
+    tags: ta.Mapping[str, str] | None = None
+    client_only_tags: ta.Mapping[str, str] | None = None
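Since Message is a plain frozen dataclass, building one is ordinary keyword construction (illustrative values only):

from omlish.specs.irc.format import Message  # re-exported by format/__init__.py above

msg = Message(
    source='nick!user@host',
    command='PRIVMSG',
    params=['#chan', 'hello there'],
    tags={'time': '2024-01-01T00:00:00Z'},
)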
omlish/specs/irc/format/nuh.py
ADDED
@@ -0,0 +1,52 @@
+import dataclasses as dc
+import typing as ta
+
+from .errors import MalformedNuhError
+
+
+@dc.dataclass(frozen=True)
+class Nuh:
+    name: str | None = None
+    user: str | None = None
+    host: str | None = None
+
+    @property
+    def tuple(self) -> tuple[str | None, ...]:
+        return (self.name, self.user, self.host)
+
+    def __iter__(self) -> ta.Iterator[str | None]:
+        return iter(self.tuple)
+
+    @classmethod
+    def parse(cls, inp: str) -> 'Nuh':
+        if not inp:
+            raise MalformedNuhError
+
+        host: str | None = None
+        host_start = inp.find('@')
+        if host_start != -1:
+            host = inp[host_start + 1:]
+            inp = inp[:host_start]
+
+        user_start = inp.find('!')
+        user: str | None = None
+        if user_start != -1:
+            user = inp[user_start + 1:]
+            inp = inp[:user_start]
+
+        return cls(
+            name=inp or None,
+            user=user,
+            host=host,
+        )
+
+    @property
+    def canonical(self) -> str:
+        parts = []
+        if (n := self.name) is not None:
+            parts.append(n)
+        if (u := self.user) is not None:
+            parts.append(f'!{u}')
+        if (h := self.host) is not None:
+            parts.append(f'@{h}')
+        return ''.join(parts)
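A quick round trip through the new Nuh (nick!user@host) parser, with made-up values:

from omlish.specs.irc.format.nuh import Nuh

n = Nuh.parse('nick!user@example.com')
print(n.name, n.user, n.host)  # nick user example.com
print(n.canonical)             # nick!user@example.com

# missing parts simply stay None:
print(Nuh.parse('irc.example.com'))  # Nuh(name='irc.example.com', user=None, host=None)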