omlish-0.0.0.dev472-py3-none-any.whl → omlish-0.0.0.dev474-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (37)
  1. omlish/__about__.py +3 -3
  2. omlish/collections/__init__.py +4 -0
  3. omlish/collections/attrregistry.py +32 -4
  4. omlish/diag/cmds/__init__.py +0 -0
  5. omlish/diag/{lslocks.py → cmds/lslocks.py} +6 -6
  6. omlish/diag/{lsof.py → cmds/lsof.py} +6 -6
  7. omlish/diag/{ps.py → cmds/ps.py} +6 -6
  8. omlish/dispatch/__init__.py +18 -12
  9. omlish/formats/json/stream/__init__.py +13 -0
  10. omlish/http/clients/asyncs.py +11 -17
  11. omlish/http/clients/coro/sync.py +3 -2
  12. omlish/http/clients/default.py +2 -2
  13. omlish/http/clients/executor.py +8 -2
  14. omlish/http/clients/httpx.py +29 -46
  15. omlish/http/clients/sync.py +11 -17
  16. omlish/http/clients/syncasync.py +8 -2
  17. omlish/http/clients/urllib.py +2 -1
  18. omlish/io/buffers.py +115 -0
  19. omlish/io/readers.py +29 -0
  20. omlish/lite/contextmanagers.py +4 -4
  21. omlish/os/pidfiles/pinning.py +2 -2
  22. omlish/text/docwrap/__init__.py +3 -0
  23. omlish/text/docwrap/api.py +77 -0
  24. omlish/text/docwrap/groups.py +84 -0
  25. omlish/text/docwrap/lists.py +167 -0
  26. omlish/text/docwrap/parts.py +139 -0
  27. omlish/text/docwrap/reflowing.py +103 -0
  28. omlish/text/docwrap/rendering.py +142 -0
  29. omlish/text/docwrap/utils.py +11 -0
  30. omlish/text/docwrap/wrapping.py +59 -0
  31. omlish/text/textwrap.py +51 -0
  32. {omlish-0.0.0.dev472.dist-info → omlish-0.0.0.dev474.dist-info}/METADATA +7 -6
  33. {omlish-0.0.0.dev472.dist-info → omlish-0.0.0.dev474.dist-info}/RECORD +37 -25
  34. {omlish-0.0.0.dev472.dist-info → omlish-0.0.0.dev474.dist-info}/WHEEL +0 -0
  35. {omlish-0.0.0.dev472.dist-info → omlish-0.0.0.dev474.dist-info}/entry_points.txt +0 -0
  36. {omlish-0.0.0.dev472.dist-info → omlish-0.0.0.dev474.dist-info}/licenses/LICENSE +0 -0
  37. {omlish-0.0.0.dev472.dist-info → omlish-0.0.0.dev474.dist-info}/top_level.txt +0 -0
omlish/io/buffers.py CHANGED
@@ -1,10 +1,18 @@
  # ruff: noqa: UP006 UP007 UP043 UP045
  # @omlish-lite
+ """
+ TODO:
+ - overhaul and just coro-ify pyio?
+ """
  import io
  import typing as ta

  from ..lite.attrops import attr_repr
  from ..lite.check import check
+ from .readers import AsyncBufferedBytesReader
+ from .readers import AsyncRawBytesReader
+ from .readers import BufferedBytesReader
+ from .readers import RawBytesReader


  ##
@@ -183,6 +191,9 @@ class ReadableListBuffer:

          self._lst: list[bytes] = []

+     def __bool__(self) -> ta.NoReturn:
+         raise TypeError("Use 'buf is not None' or 'len(buf)'.")
+
      def __len__(self) -> int:
          return sum(map(len, self._lst))

@@ -249,6 +260,110 @@ class ReadableListBuffer:
          r = self.read_until_(delim)
          return r if isinstance(r, bytes) else None

+     #
+
+     DEFAULT_BUFFERED_READER_CHUNK_SIZE: ta.ClassVar[int] = -1
+
+     @ta.final
+     class _BufferedBytesReader(BufferedBytesReader):
+         def __init__(
+                 self,
+                 raw: RawBytesReader,
+                 buf: 'ReadableListBuffer',
+                 *,
+                 chunk_size: ta.Optional[int] = None,
+         ) -> None:
+             self._raw = raw
+             self._buf = buf
+             self._chunk_size = chunk_size or ReadableListBuffer.DEFAULT_BUFFERED_READER_CHUNK_SIZE
+
+         def read1(self, n: int = -1, /) -> bytes:
+             if n < 0:
+                 n = self._chunk_size
+             if not n:
+                 return b''
+             if 0 < n <= len(self._buf):
+                 return self._buf.read(n) or b''
+             return self._raw.read1(n)
+
+         def read(self, /, n: int = -1) -> bytes:
+             if n < 0:
+                 return self.readall()
+             while len(self._buf) < n:
+                 if not (b := self._raw.read1(n)):
+                     break
+                 self._buf.feed(b)
+             return self._buf.read(n) or b''
+
+         def readall(self) -> bytes:
+             buf = io.BytesIO()
+             buf.write(self._buf.read() or b'')
+             while (b := self._raw.read1(self._chunk_size)):
+                 buf.write(b)
+             return buf.getvalue()
+
+     def new_buffered_reader(
+             self,
+             raw: RawBytesReader,
+             *,
+             chunk_size: ta.Optional[int] = None,
+     ) -> BufferedBytesReader:
+         return self._BufferedBytesReader(
+             raw,
+             self,
+             chunk_size=chunk_size,
+         )
+
+     @ta.final
+     class _AsyncBufferedBytesReader(AsyncBufferedBytesReader):
+         def __init__(
+                 self,
+                 raw: AsyncRawBytesReader,
+                 buf: 'ReadableListBuffer',
+                 *,
+                 chunk_size: ta.Optional[int] = None,
+         ) -> None:
+             self._raw = raw
+             self._buf = buf
+             self._chunk_size = chunk_size or ReadableListBuffer.DEFAULT_BUFFERED_READER_CHUNK_SIZE
+
+         async def read1(self, n: int = -1, /) -> bytes:
+             if n < 0:
+                 n = self._chunk_size
+             if not n:
+                 return b''
+             if 0 < n <= len(self._buf):
+                 return self._buf.read(n) or b''
+             return await self._raw.read1(n)
+
+         async def read(self, /, n: int = -1) -> bytes:
+             if n < 0:
+                 return await self.readall()
+             while len(self._buf) < n:
+                 if not (b := await self._raw.read1(n)):
+                     break
+                 self._buf.feed(b)
+             return self._buf.read(n) or b''
+
+         async def readall(self) -> bytes:
+             buf = io.BytesIO()
+             buf.write(self._buf.read() or b'')
+             while b := await self._raw.read1(self._chunk_size):
+                 buf.write(b)
+             return buf.getvalue()
+
+     def new_async_buffered_reader(
+             self,
+             raw: AsyncRawBytesReader,
+             *,
+             chunk_size: ta.Optional[int] = None,
+     ) -> AsyncBufferedBytesReader:
+         return self._AsyncBufferedBytesReader(
+             raw,
+             self,
+             chunk_size=chunk_size,
+         )
+


  ##

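A rough usage sketch of the new buffered-reader hooks (not part of the diff; it assumes only what is shown above, namely that ReadableListBuffer lives in omlish.io.buffers and that anything exposing read1() satisfies RawBytesReader):

    import io

    from omlish.io.buffers import ReadableListBuffer

    # io.BytesIO already exposes read1(), so it structurally satisfies RawBytesReader.
    raw = io.BytesIO(b'hello world')

    rdr = ReadableListBuffer().new_buffered_reader(raw)

    print(rdr.read(5))    # b'hello' - feeds raw chunks into the list buffer until 5 bytes are available
    print(rdr.readall())  # b' world' - drains the buffer, then the raw reader

The async variant mirrors this shape via new_async_buffered_reader(), with awaitable read1()/read()/readall().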
omlish/io/readers.py ADDED
@@ -0,0 +1,29 @@
+ # ruff: noqa: UP045
+ # @omlish-lite
+ import typing as ta
+
+
+ ##
+
+
+ class RawBytesReader(ta.Protocol):
+     def read1(self, n: int = -1, /) -> bytes: ...
+
+
+ class BufferedBytesReader(RawBytesReader, ta.Protocol):
+     def read(self, n: int = -1, /) -> bytes: ...
+
+     def readall(self) -> bytes: ...
+
+
+ #
+
+
+ class AsyncRawBytesReader(ta.Protocol):
+     def read1(self, n: int = -1, /) -> ta.Awaitable[bytes]: ...
+
+
+ class AsyncBufferedBytesReader(AsyncRawBytesReader, ta.Protocol):
+     def read(self, n: int = -1, /) -> ta.Awaitable[bytes]: ...
+
+     def readall(self) -> ta.Awaitable[bytes]: ...
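Since these are typing.Protocol classes, any object with a matching read1() method satisfies RawBytesReader structurally; no inheritance is required. A hypothetical sketch (SocketRawReader is illustrative, not part of the package):

    import socket

    from omlish.io.readers import RawBytesReader


    class SocketRawReader:
        """Adapts a socket to the RawBytesReader shape without inheriting from anything."""

        def __init__(self, sock: socket.socket) -> None:
            self._sock = sock

        def read1(self, n: int = -1, /) -> bytes:
            # recv() needs a positive size, so fall back to a fixed chunk when n < 0.
            return self._sock.recv(n if n > 0 else 65536)


    def consume(raw: RawBytesReader) -> bytes:
        # A type checker accepts any object with a compatible read1() here.
        return raw.read1(16)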
omlish/lite/contextmanagers.py CHANGED
@@ -54,7 +54,7 @@ class ExitStacked:
          es.__enter__()
          try:
              self._enter_contexts()
-         except Exception:  # noqa
+         except BaseException:  # noqa
              es.__exit__(*sys.exc_info())
              raise
          return self
@@ -65,7 +65,7 @@ class ExitStacked:
              return None
          try:
              self._exit_contexts()
-         except Exception:  # noqa
+         except BaseException:  # noqa
              es.__exit__(*sys.exc_info())
              raise
          return es.__exit__(exc_type, exc_val, exc_tb)
@@ -113,7 +113,7 @@ class AsyncExitStacked:
          await es.__aenter__()
          try:
              await self._async_enter_contexts()
-         except Exception:  # noqa
+         except BaseException:  # noqa
              await es.__aexit__(*sys.exc_info())
              raise
          return self
@@ -124,7 +124,7 @@ class AsyncExitStacked:
              return None
          try:
              await self._async_exit_contexts()
-         except Exception:  # noqa
+         except BaseException:  # noqa
              await es.__aexit__(*sys.exc_info())
              raise
          return await es.__aexit__(exc_type, exc_val, exc_tb)
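The Exception -> BaseException change matters because KeyboardInterrupt, SystemExit, and GeneratorExit do not derive from Exception: with the old handler, such an exception raised while entering or exiting contexts would skip the unwind call and leak already-entered contexts. A standalone sketch of the pattern being fixed, using plain contextlib.ExitStack rather than the omlish classes:

    import contextlib
    import sys


    def enter_all(cms):
        es = contextlib.ExitStack()
        es.__enter__()
        try:
            for cm in cms:
                es.enter_context(cm)
        except BaseException:  # 'except Exception' would miss KeyboardInterrupt / SystemExit
            es.__exit__(*sys.exc_info())  # unwind whatever was already entered
            raise
        return es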
omlish/os/pidfiles/pinning.py CHANGED
@@ -25,8 +25,8 @@ import sys
  import time
  import typing as ta

- from ...diag.lslocks import LslocksCommand
- from ...diag.lsof import LsofCommand
+ from ...diag.cmds.lslocks import LslocksCommand
+ from ...diag.cmds.lsof import LsofCommand
  from ...lite.abstract import Abstract
  from ...lite.check import check
  from ...lite.timeouts import Timeout
omlish/text/docwrap/__init__.py ADDED
@@ -0,0 +1,3 @@
+ from .api import (  # noqa
+     docwrap,
+ )
omlish/text/docwrap/api.py ADDED
@@ -0,0 +1,77 @@
+ import dataclasses as dc
+ import typing as ta
+
+ from ..textwrap import TextwrapOpts
+ from .groups import group_indents
+ from .lists import ListBuilder
+ from .parts import Part
+ from .parts import build_root
+ from .reflowing import TextwrapReflower
+ from .reflowing import reflow_block_text
+
+
+ ##
+
+
+ DEFAULT_TAB_WIDTH: int = 4
+
+
+ def replace_tabs(s: str, tab_width: int | None = None) -> str:
+     if tab_width is None:
+         tab_width = DEFAULT_TAB_WIDTH
+     return s.replace('\t', ' ' * tab_width)
+
+
+ ##
+
+
+ def parse(
+         s: str,
+         *,
+         tab_width: int | None = None,
+         allow_improper_list_children: bool | ta.Literal['lists_only'] | None = None,
+ ) -> Part:
+     s = replace_tabs(
+         s,
+         tab_width=tab_width,
+     )
+
+     root = build_root(s)
+
+     root = group_indents(root)
+
+     root = ListBuilder(
+         allow_improper_children=allow_improper_list_children,
+     ).build_lists(root)
+
+     return root
+
+
+ ##
+
+
+ def docwrap(
+         s: str,
+         *,
+         width: int | None = None,
+         textwrap: TextwrapOpts | ta.Mapping[str, ta.Any] | None = None,
+         allow_improper_list_children: bool | ta.Literal['lists_only'] = False,
+ ) -> Part:
+     if isinstance(textwrap, ta.Mapping):
+         textwrap = TextwrapOpts(**textwrap)
+     elif textwrap is None:
+         textwrap = TextwrapOpts()
+     if width is not None:
+         textwrap = dc.replace(textwrap, width=width)
+
+     root = parse(
+         s,
+         allow_improper_list_children=allow_improper_list_children,
+     )
+
+     root = reflow_block_text(
+         root,
+         TextwrapReflower(opts=textwrap),
+     )
+
+     return root
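A rough usage sketch of the new public entry point (the input string is illustrative; rendering the resulting tree back to text lives in omlish/text/docwrap/rendering.py, which is not shown in this diff):

    from omlish.text.docwrap import docwrap

    doc = 'An introductory paragraph that should be re-wrapped.\n\n- first item\n- second item\n'

    part = docwrap(doc, width=40)
    # 'part' is a Part tree (Text / Blank / Indent / Block / List nodes) whose block
    # text has been reflowed; the explicit width overrides TextwrapOpts.width.
    print(part)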
omlish/text/docwrap/groups.py ADDED
@@ -0,0 +1,84 @@
+ import typing as ta
+
+ from ... import check
+ from ... import dataclasses as dc
+ from .parts import Blank
+ from .parts import Block
+ from .parts import Indent
+ from .parts import Part
+ from .parts import Text
+ from .parts import blockify
+
+
+ ##
+
+
+ @dc.dataclass()
+ @dc.extra_class_params(default_repr_fn=dc.truthy_repr)
+ class _IndentGroup:
+     n: int
+     cs: list[ta.Union[Blank, Text, '_IndentGroup']] = dc.field(default_factory=list)
+
+
+ def group_indents(root: Part) -> Part:
+     rg = _IndentGroup(0)
+     stk: list[_IndentGroup] = [rg]
+
+     for p in (root.ps if isinstance(root, Block) else [root]):
+         if isinstance(p, Blank):
+             stk[-1].cs.append(p)
+             continue
+
+         n: int
+         t: Text
+         if isinstance(p, Text):
+             n, t = 0, p
+         elif isinstance(p, Indent):
+             n = p.n
+             t = check.isinstance(p.p, Text)
+         else:
+             raise TypeError(p)
+
+         while n < stk[-1].n:
+             stk.pop()
+
+         if n > stk[-1].n:
+             nxt = _IndentGroup(n=n, cs=[t])
+             stk[-1].cs.append(nxt)
+             stk.append(nxt)
+
+         else:
+             check.state(stk[-1].n == n)
+             stk[-1].cs.append(t)
+
+     #
+
+     def relativize(g: '_IndentGroup') -> None:
+         for c in g.cs:
+             if isinstance(c, _IndentGroup):
+                 check.state(c.n > g.n)
+                 relativize(c)
+                 c.n -= g.n
+
+     relativize(rg)
+
+     #
+
+     def convert(g: '_IndentGroup') -> Part:
+         if g.n < 1:
+             check.state(g is rg)
+
+         lst: list[Part] = []
+         for c in g.cs:
+             if isinstance(c, (Blank, Text)):
+                 lst.append(c)
+
+             elif isinstance(c, _IndentGroup):
+                 lst.append(Indent(c.n, convert(c)))  # type: ignore[arg-type]
+
+             else:
+                 raise TypeError(c)
+
+         return blockify(*lst)
+
+     return convert(rg)
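A small sketch of what group_indents does on its own (the comment shows the approximate shape of the result, not its exact repr):

    from omlish.text.docwrap.groups import group_indents
    from omlish.text.docwrap.parts import build_root

    root = group_indents(build_root('top\n  child one\n  child two\n'))
    # Consecutive lines indented by the same amount are gathered under one Indent node
    # holding a Block, with indent widths made relative to the enclosing group:
    #   Block([Text('top'), Indent(2, Block([Text('child one'), Text('child two')]))])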
omlish/text/docwrap/lists.py ADDED
@@ -0,0 +1,167 @@
+ """
+ TODO:
+ - numeric lettered lists (even unordered) (with separator - `1)` / `1:` / ...)
+ """
+ import typing as ta
+
+ from ... import check
+ from .parts import Blank
+ from .parts import Block
+ from .parts import Indent
+ from .parts import List
+ from .parts import Part
+ from .parts import Text
+ from .parts import blockify
+ from .utils import all_same
+
+
+ ##
+
+
+ class ListBuilder:
+     DEFAULT_ALLOW_IMPROPER_CHILDREN: ta.ClassVar[bool | ta.Literal['lists_only']] = False
+     DEFAULT_LIST_PREFIXES: ta.ClassVar[ta.Sequence[str]] = ['*', '-']
+
+     def __init__(
+             self,
+             *,
+             list_prefixes: ta.Iterable[str] | None = None,
+             allow_improper_children: bool | ta.Literal['lists_only'] | None = None,
+     ) -> None:
+         super().__init__()
+
+         if list_prefixes is None:
+             list_prefixes = self.DEFAULT_LIST_PREFIXES
+         self._list_prefixes = set(check.not_isinstance(list_prefixes, str))
+         if allow_improper_children is None:
+             allow_improper_children = self.DEFAULT_ALLOW_IMPROPER_CHILDREN
+         self._allow_improper_children = allow_improper_children
+
+         self._len_sorted_list_prefixes = sorted(self._list_prefixes, key=len, reverse=True)
+
+     #
+
+     def _should_promote_indent_child(self, p: Indent) -> bool:
+         ac = self._allow_improper_children
+         if isinstance(ac, bool):
+             return ac
+         elif ac == 'lists_only':
+             return isinstance(p.p, List)
+         else:
+             raise TypeError(ac)
+
+     #
+
+     class _DetectedList(ta.NamedTuple):
+         pfx: str
+         ofs: int
+         len: int
+
+     def _detect_list(self, ps: ta.Sequence[Part], st: int = 0) -> _DetectedList | None:
+         if not ps:
+             return None
+
+         for lp in self._len_sorted_list_prefixes:
+             sp = lp + ' '
+
+             mo = -1
+             n = st
+             while n < len(ps):
+                 p = ps[n]
+
+                 if isinstance(p, (Blank, Text)):
+                     if isinstance(p, Text):
+                         if p.s.startswith(sp):
+                             if mo < 0:
+                                 mo = n
+                         elif mo >= 0:
+                             break
+
+                 elif isinstance(p, Indent):
+                     if mo >= 0 and p.n < len(sp):
+                         if not self._should_promote_indent_child(p):
+                             break
+
+                 elif isinstance(p, List):
+                     if mo >= 0:
+                         break
+
+                 else:
+                     raise TypeError(p)
+
+                 n += 1
+
+             if mo >= 0:
+                 return ListBuilder._DetectedList(lp, mo, n - mo)
+
+         return None
+
+     def _build_list(self, lp: str, ps: ta.Sequence[Part]) -> List:
+         sp = lp + ' '
+
+         new: list[list[Part]] = []
+
+         f = check.isinstance(ps[0], Text)
+         check.state(f.s.startswith(sp))
+         new.append([Text(f.s[len(sp):])])
+         del f
+
+         for i in range(1, len(ps)):
+             p = ps[i]
+
+             if isinstance(p, Blank):
+                 new[-1].append(p)
+
+             elif isinstance(p, Text):
+                 check.state(p.s.startswith(sp))
+                 new.append([Text(p.s[len(sp):])])
+
+             elif isinstance(p, Indent):
+                 if p.n < len(sp):
+                     check.state(self._should_promote_indent_child(p))
+                     p = Indent(len(sp), p.p)
+
+                 if p.n == len(sp):
+                     new[-1].append(p.p)
+
+                 else:
+                     raise NotImplementedError
+
+             else:
+                 raise TypeError(p)
+
+         #
+
+         return List(lp, [blockify(*x) for x in new])
+
+     def build_lists(self, root: Part) -> Part:
+         def rec(p: Part) -> Part:  # noqa
+             if isinstance(p, Block):
+                 new = [rec(c) for c in p.ps]
+                 if not all_same(new, p.ps):
+                     return rec(blockify(*new))
+
+                 st = 0
+                 diff = False
+                 while (dl := self._detect_list(new, st)) is not None:
+                     diff = True
+                     ln = self._build_list(dl.pfx, new[dl.ofs:dl.ofs + dl.len])
+                     new[dl.ofs:dl.ofs + dl.len] = [ln]
+                     st = dl.ofs + 1
+
+                 if diff:
+                     p = blockify(*new)
+                 return p
+
+             elif isinstance(p, Indent):
+                 if (n := rec(p.p)) is not p.p:
+                     p = Indent(p.n, n)  # type: ignore[arg-type]
+                 return p
+
+             elif isinstance(p, (Blank, Text, List)):
+                 return p
+
+             else:
+                 raise TypeError(p)
+
+         return rec(root)
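A brief sketch of the list-building pass, driven through the parse() helper from api.py (the comment shows the approximate shape of the result):

    from omlish.text.docwrap.api import parse

    part = parse('intro line\n\n- alpha\n- beta\n')
    # The '- ' prefixed lines collapse into a single List part with the prefix stripped
    # from each element:
    #   Block([Text('intro line'), Blank(), List('-', [Text('alpha'), Text('beta')])])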
omlish/text/docwrap/parts.py ADDED
@@ -0,0 +1,139 @@
+ import typing as ta
+
+ from ... import check
+ from ... import dataclasses as dc
+ from ... import lang
+
+
+ ##
+
+
+ @dc.dataclass(frozen=True)
+ @dc.extra_class_params(terse_repr=True)
+ class Part(lang.Abstract, lang.Sealed):
+     pass
+
+
+ @dc.dataclass(frozen=True)
+ @dc.extra_class_params(terse_repr=True)
+ class Text(Part, lang.Final):
+     s: str
+
+     @dc.init
+     def _check_s(self) -> None:
+         check.non_empty_str(self.s)
+         check.state(self.s == self.s.strip())
+
+
+ @dc.dataclass(frozen=True)
+ @dc.extra_class_params(terse_repr=True)
+ class Blank(Part, lang.Final):
+     pass
+
+
+ @dc.dataclass(frozen=True)
+ @dc.extra_class_params(terse_repr=True)
+ class Indent(Part, lang.Final):
+     n: int = dc.xfield(validate=lambda n: n > 0)
+     p: ta.Union[Text, 'Block', 'List'] = dc.xfield(coerce=lambda p: check.isinstance(p, (Text, Block, List)))
+
+
+ @dc.dataclass(frozen=True)
+ @dc.extra_class_params(terse_repr=True)
+ class Block(Part, lang.Final):
+     ps: ta.Sequence[Part]
+
+     @dc.init
+     def _check_ps(self) -> None:
+         check.state(len(self.ps) > 1)
+         for i, p in enumerate(self.ps):
+             check.isinstance(p, Part)
+             if i and isinstance(p, Block):
+                 check.not_isinstance(self.ps[i - 1], Block)
+
+
+ @dc.dataclass(frozen=True)
+ @dc.extra_class_params(terse_repr=True)
+ class List(Part, lang.Final):
+     d: str = dc.xfield(coerce=check.non_empty_str)
+     es: ta.Sequence[Part] = dc.xfield()
+
+     @dc.init
+     def _check_es(self) -> None:
+         check.not_empty(self.es)
+         for e in self.es:
+             check.isinstance(e, Part)
+
+
+ ##
+
+
+ def _squish(ps: ta.Sequence[Part]) -> ta.Sequence[Part]:
+     for p in ps:
+         check.isinstance(p, Part)
+
+     if len(ps) < 2:
+         return ps
+
+     while True:
+         if any(isinstance(p, Block) for p in ps):
+             ps = list(lang.flatmap(lambda p: p.ps if isinstance(p, Block) else [p], ps))
+             continue
+
+         if any(
+                 isinstance(ps[i], Indent) and
+                 isinstance(ps[i + 1], Indent) and
+                 ps[i].n == ps[i + 1].n  # type: ignore[attr-defined]
+                 for i in range(len(ps) - 1)
+         ):
+             new: list[Part | tuple[int, list[Part]]] = []
+             for p in ps:
+                 if isinstance(p, Indent):
+                     if new and isinstance(y := new[-1], tuple) and p.n == y[0]:
+                         y[1].append(p.p)
+                     else:
+                         new.append((p.n, [p.p]))
+                 else:
+                     new.append(p)
+             ps = [
+                 Indent(x[0], blockify(*x[1])) if isinstance(x, tuple) else x  # type: ignore[arg-type]
+                 for x in new
+             ]
+             continue
+
+         break
+
+     return ps
+
+
+ def blockify(*ps: Part) -> Part:
+     check.not_empty(ps)
+     ps = _squish(ps)  # type: ignore[assignment]
+     if len(ps) == 1:
+         return ps[0]
+     return Block(ps)
+
+
+ ##
+
+
+ def build_root(s: str) -> Part:
+     lst: list[Part] = []
+
+     for l in s.splitlines():
+         if not (sl := l.strip()):
+             lst.append(Blank())
+             continue
+
+         p: Part = Text(sl)
+
+         n = next((i for i, c in enumerate(l) if not c.isspace()), 0)
+         if n:
+             p = Indent(n, p)  # type: ignore[arg-type]
+
+         lst.append(p)
+
+     if len(lst) == 1:
+         return lst[0]
+     else:
+         return Block(lst)
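A short sketch of the lowest layer: build_root turns raw lines into Text / Blank / Indent parts, and blockify (via _squish) flattens nested Blocks and merges adjacent Indents of equal width:

    from omlish.text.docwrap.parts import Indent
    from omlish.text.docwrap.parts import Text
    from omlish.text.docwrap.parts import blockify
    from omlish.text.docwrap.parts import build_root

    root = build_root('a\n\n  b\n')
    # approximately: Block([Text('a'), Blank(), Indent(2, Text('b'))])

    merged = blockify(Indent(2, Text('x')), Indent(2, Text('y')))
    # approximately: Indent(2, Block([Text('x'), Text('y')])) - equal-width Indents merge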