omdev 0.0.0.dev486-py3-none-any.whl → 0.0.0.dev506-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (50)
  1. omdev/.omlish-manifests.json +2 -2
  2. omdev/README.md +51 -0
  3. omdev/__about__.py +4 -2
  4. omdev/ci/cli.py +1 -1
  5. omdev/cli/clicli.py +37 -7
  6. omdev/dataclasses/cli.py +1 -1
  7. omdev/interp/cli.py +1 -1
  8. omdev/interp/types.py +3 -2
  9. omdev/interp/uv/provider.py +36 -0
  10. omdev/manifests/main.py +1 -1
  11. omdev/markdown/incparse.py +392 -0
  12. omdev/packaging/revisions.py +1 -1
  13. omdev/py/tools/pipdepup.py +150 -93
  14. omdev/pyproject/cli.py +2 -36
  15. omdev/pyproject/configs.py +1 -1
  16. omdev/pyproject/pkg.py +1 -1
  17. omdev/pyproject/reqs.py +8 -7
  18. omdev/pyproject/tools/aboutdeps.py +5 -0
  19. omdev/pyproject/tools/pyversions.py +47 -0
  20. omdev/pyproject/versions.py +40 -0
  21. omdev/scripts/ci.py +369 -26
  22. omdev/scripts/interp.py +51 -9
  23. omdev/scripts/lib/inject.py +8 -1
  24. omdev/scripts/lib/logs.py +117 -21
  25. omdev/scripts/pyproject.py +479 -76
  26. omdev/tools/git/cli.py +43 -13
  27. omdev/tools/json/formats.py +2 -0
  28. omdev/tools/jsonview/cli.py +19 -61
  29. omdev/tools/jsonview/resources/jsonview.html.j2 +43 -0
  30. omdev/tools/pawk/README.md +195 -0
  31. omdev/tools/sqlrepl.py +189 -78
  32. omdev/tui/apps/edit/main.py +5 -1
  33. omdev/tui/apps/irc/app.py +28 -20
  34. omdev/tui/apps/irc/commands.py +1 -1
  35. omdev/tui/rich/__init__.py +12 -0
  36. omdev/tui/rich/markdown2.py +219 -18
  37. omdev/tui/textual/__init__.py +41 -2
  38. omdev/tui/textual/app2.py +6 -1
  39. omdev/tui/textual/debug/__init__.py +10 -0
  40. omdev/tui/textual/debug/dominfo.py +151 -0
  41. omdev/tui/textual/debug/screen.py +24 -0
  42. omdev/tui/textual/devtools.py +187 -0
  43. omdev/tui/textual/logging2.py +20 -0
  44. omdev/tui/textual/types.py +45 -0
  45. {omdev-0.0.0.dev486.dist-info → omdev-0.0.0.dev506.dist-info}/METADATA +10 -6
  46. {omdev-0.0.0.dev486.dist-info → omdev-0.0.0.dev506.dist-info}/RECORD +50 -39
  47. {omdev-0.0.0.dev486.dist-info → omdev-0.0.0.dev506.dist-info}/WHEEL +0 -0
  48. {omdev-0.0.0.dev486.dist-info → omdev-0.0.0.dev506.dist-info}/entry_points.txt +0 -0
  49. {omdev-0.0.0.dev486.dist-info → omdev-0.0.0.dev506.dist-info}/licenses/LICENSE +0 -0
  50. {omdev-0.0.0.dev486.dist-info → omdev-0.0.0.dev506.dist-info}/top_level.txt +0 -0
omdev/.omlish-manifests.json CHANGED
@@ -39,7 +39,7 @@
  "module": ".cli.clicli",
  "attr": "_CLI_MODULE",
  "file": "omdev/cli/clicli.py",
- "line": 233,
+ "line": 263,
  "value": {
  "!.cli.types.CliModule": {
  "name": "cli",
@@ -527,7 +527,7 @@
  "module": ".tools.sqlrepl",
  "attr": "_CLI_MODULE",
  "file": "omdev/tools/sqlrepl.py",
- "line": 196,
+ "line": 307,
  "value": {
  "!.cli.types.CliModule": {
  "name": "sqlrepl",
omdev/README.md ADDED
@@ -0,0 +1,51 @@
+ # Overview
+
+ Development utilities and support code.
+
+ # Notable packages
+
+ - **[cli](https://github.com/wrmsr/omlish/blob/master/omdev/cli)** - The codebase's all-in-one CLI. This is not
+   installed as an entrypoint / command when this package is itself installed - that is separated into the `omdev-cli`
+   installable package so as to not pollute users' bin/ directories when depping this lib for its utility code.
+
+ - **[amalg](https://github.com/wrmsr/omlish/blob/master/omdev/amalg)** - The [amalgamator](#amalgamation).
+
+ - **[pyproject](https://github.com/wrmsr/omlish/blob/master/omdev/pyproject)**
+   ([amalg](https://github.com/wrmsr/omlish/blob/master/omdev/scripts/pyproject.py)) - python project management tool.
+   wrangles but does not replace tools like venv, pip, setuptools, and uv. does things like sets up venvs, generates
+   [`.pkg`](https://github.com/wrmsr/omlish/blob/master/.pkg) directories and their `pyproject.toml`'s (from their
+   `__about__.py`'s), and packages them. this should grow to eat more and more of the Makefile. as it is amalgamated it
+   requires no installation and can just be dropped into other projects / repos.
+
+ - **[ci](https://github.com/wrmsr/omlish/blob/master/omdev/ci)**
+   ([amalg](https://github.com/wrmsr/omlish/blob/master/omdev/scripts/ci.py)) - ci runner. given a
+   [`compose.yml`](https://github.com/wrmsr/omlish/blob/master/docker/compose.yml)
+   and requirements.txt files, takes care of building and caching of containers and venvs and execution of required ci
+   commands. detects and [natively uses](https://github.com/wrmsr/omlish/blob/master/omdev/ci/github/api/v2)
+   github-action's caching system. unifies ci execution between local dev and github runners.
+
+ - **[tools.json](https://github.com/wrmsr/omlish/blob/master/omdev/tools/json)** (cli: `om j`) - a tool for json-like
+   data, obviously in the vein of [jq](https://github.com/jqlang/jq) but using the internal
+   [jmespath](https://github.com/wrmsr/omlish/blob/master/omlish/specs/jmespath) engine. supports
+   [true streaming](https://github.com/wrmsr/omlish/blob/master/omlish/formats/json/stream) json input and output, as
+   well as [various other](https://github.com/wrmsr/omlish/blob/master/omdev/tools/json/formats.py) non-streaming input
+   formats.
+
+ - **[tools.git](https://github.com/wrmsr/omlish/blob/master/omdev/tools/git)** (cli: `om git`) - a tool for various lazy
+   git operations, including the one that (poorly) writes all of these commit messages.
+
+ # Amalgamation
+
+ Amalgamation is the process of stitching together multiple python source files into a single self-contained python
+ script. ['lite'](https://github.com/wrmsr/omlish/blob/master/omlish#lite-code) code is written in a style conducive to
+ this.
+
+ # Local storage
+
+ Some of this code, when asked, will store things on the local filesystem. The only directories used (outside of ones
+ explicitly specified as command or function arguments) are managed in
+ [home.paths](https://github.com/wrmsr/omlish/blob/master/omdev/home/paths.py), and are the following:
+
+ - `$OMLISH_HOME`, default of `~/.omlish` - persistent things like config and state.
+ - `$OMLISH_CACHE`, default of `~/.cache/omlish` - used for things like the local ci cache and
+   [various other](https://github.com/search?q=repo%3Awrmsr%2Fomlish+%22dcache.%22&type=code) cached data.
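
The "Local storage" section of the new README describes two env-var-overridable directories. A minimal sketch of how those defaults might resolve, for illustration only; the real lookup lives in omdev/home/paths.py and its helper names and behavior may differ:

```python
# Illustrative only: hypothetical helpers showing the documented defaults.
import os


def omlish_home_dir() -> str:  # hypothetical name, not the home.paths API
    return os.path.expanduser(os.environ.get('OMLISH_HOME', '~/.omlish'))


def omlish_cache_dir() -> str:  # hypothetical name, not the home.paths API
    return os.path.expanduser(os.environ.get('OMLISH_CACHE', '~/.cache/omlish'))
```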
omdev/__about__.py CHANGED
@@ -13,7 +13,7 @@ class Project(ProjectBase):
 
      optional_dependencies = {
          'black': [
-             'black ~= 25.11',
+             'black ~= 26.1',
          ],
 
          'c': [
@@ -44,7 +44,9 @@ class Project(ProjectBase):
 
          'tui': [
              'rich ~= 14.2',
-             'textual ~= 6.8',
+             'textual ~= 7.3',  # [syntax]
+             'textual-dev ~= 1.8',
+             'textual-speedups ~= 0.2',
          ],
      }
 
omdev/ci/cli.py CHANGED
@@ -22,7 +22,7 @@ from omlish.argparse.cli import argparse_cmd
  from omlish.lite.check import check
  from omlish.lite.inject import inj
  from omlish.logs.modules import get_module_logger
- from omlish.logs.standard import configure_standard_logging
+ from omlish.logs.std.standard import configure_standard_logging
 
  from .cache import DirectoryFileCache
  from .ci import Ci
omdev/cli/clicli.py CHANGED
@@ -1,9 +1,11 @@
+ import dataclasses as dc
  import inspect
  import os
  import re
  import shlex
  import subprocess
  import sys
+ import time
  import typing as ta
  import urllib.parse
  import urllib.request
@@ -13,6 +15,7 @@ from omlish import lang
  from omlish.argparse import all as ap
  from omlish.os.temp import temp_dir_context
 
+ from ..packaging.versions import Version
  from ..pip import get_root_dists
  from ..pip import lookup_latest_package_version
  from . import install
@@ -87,22 +90,49 @@ class CliCli(ap.Cli):
 
      #
 
+     @dc.dataclass()
+     class ReinstallWouldNotUpgradeError(Exception):
+         current_version: str
+         target_version: str
+
+         def __str__(self) -> str:
+             return f'Current version {self.current_version} is not older than target version {self.target_version} '
+
      @ap.cmd(
          ap.arg('--url', default=DEFAULT_REINSTALL_URL),
          ap.arg('--local', action='store_true'),
          ap.arg('--no-deps', action='store_true'),
          ap.arg('--no-uv', action='store_true'),
          ap.arg('--dry-run', action='store_true'),
+         ap.arg('--must-upgrade', action='store_true'),
+         ap.arg('--must-upgrade-loop', action='store_true'),
          ap.arg('--version'),
          ap.arg('extra_deps', nargs='*'),
      )
      def reinstall(self) -> None:
-         latest_version = _parse_latest_version_str(lookup_latest_package_version(__package__.split('.')[0]))
-
-         if self.args.version is not None:
-             target_version: str = self.args.version
-         else:
-             target_version = latest_version
+         current_version = __about__.__version__
+
+         while True:
+             latest_version = _parse_latest_version_str(lookup_latest_package_version(__package__.split('.')[0]))
+
+             if self.args.version is not None:
+                 target_version: str = self.args.version
+             else:
+                 target_version = latest_version
+
+             if self.args.must_upgrade or self.args.must_upgrade_loop:
+                 current_vo = Version(current_version)
+                 target_vo = Version(target_version)
+                 if not (target_vo > current_vo):
+                     ex = CliCli.ReinstallWouldNotUpgradeError(current_version, target_version)
+                     if self.args.must_upgrade_loop:
+                         print(ex)
+                         time.sleep(1)
+                         continue
+                     else:
+                         raise ex
+
+             break
 
          #
 
@@ -186,7 +216,7 @@ class CliCli(ap.Cli):
 
          #
 
-         print(f'Current version: {__about__.__version__}')
+         print(f'Current version: {current_version}')
          print(f'Latest version: {latest_version}')
          print(f'Target version: {target_version}')
          print()
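
The new `--must-upgrade` / `--must-upgrade-loop` flags gate reinstallation on a PEP 440-style comparison of the current and target versions. A small sketch of the check the loop performs, using `packaging.version.Version` as a stand-in for the `Version` class imported from `..packaging.versions` (an assumption; the vendored class may differ in detail):

```python
# Sketch only: packaging.version.Version stands in for omdev's vendored Version.
from packaging.version import Version

current = Version('0.0.0.dev486')
target = Version('0.0.0.dev506')

if not (target > current):
    # --must-upgrade raises here; --must-upgrade-loop prints, sleeps, and retries.
    raise RuntimeError(f'current version {current} is not older than target {target}')
```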
omdev/dataclasses/cli.py CHANGED
@@ -1,5 +1,5 @@
  from omlish.argparse import all as ap
- from omlish.logs.standard import configure_standard_logging
+ from omlish.logs.std.standard import configure_standard_logging
 
  from .codegen import DataclassCodeGen
 
omdev/interp/cli.py CHANGED
@@ -17,7 +17,7 @@ from omlish.lite.check import check
  from omlish.lite.inject import Injector
  from omlish.lite.inject import inj
  from omlish.lite.runtime import check_lite_runtime_version
- from omlish.logs.standard import configure_standard_logging
+ from omlish.logs.std.standard import configure_standard_logging
 
  from .inject import bind_interp
  from .resolvers import InterpResolver
omdev/interp/types.py CHANGED
@@ -85,9 +85,10 @@ class InterpSpecifier:
      def parse(cls, s: str) -> 'InterpSpecifier':
          s, o = InterpOpts.parse_suffix(s)
          if not any(s.startswith(o) for o in Specifier.OPERATORS):
-             s = '~=' + s
              if s.count('.') < 2:
-                 s += '.0'
+                 s = '~=' + s + '.0'
+             else:
+                 s = '==' + s
          return cls(
              specifier=Specifier(s),
              opts=o,
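
The revised `parse` normalization only kicks in when the string has no explicit operator prefix: short versions become compatible-release specifiers, fully qualified ones become exact pins. A rough restatement of that rule in isolation (simplified; the real method also strips `InterpOpts` suffixes and wraps the result in a `Specifier`):

```python
# Simplified restatement of the normalization added above; illustrative only.
def normalize(s: str) -> str:
    if s.count('.') < 2:
        return '~=' + s + '.0'  # e.g. '3.12' -> '~=3.12.0' (any 3.12.x)
    return '==' + s             # e.g. '3.12.4' -> '==3.12.4' (exact pin)


assert normalize('3.12') == '~=3.12.0'
assert normalize('3.12.4') == '==3.12.4'
```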
omdev/interp/uv/provider.py CHANGED
@@ -5,7 +5,9 @@ uv run --python 3.11.6 pip
  uv venv --python 3.11.6 --seed barf
  python3 -m venv barf && barf/bin/pip install uv && barf/bin/uv venv --python 3.11.6 --seed barf2
  uv python find '3.13.10'
+ uv python list --output-format=json
  """
+ import dataclasses as dc
  import typing as ta
 
  from omlish.logs.protocols import LoggerLike
@@ -21,6 +23,34 @@ from .uv import Uv
  ##
 
 
+ @dc.dataclass(frozen=True)
+ class UvPythonListOutput:
+     key: str
+     version: str
+
+     @dc.dataclass(frozen=True)
+     class VersionParts:
+         major: int
+         minor: int
+         patch: int
+
+     version_parts: VersionParts
+
+     path: ta.Optional[str]
+     symlink: ta.Optional[str]
+
+     url: str
+
+     os: str  # emscripten linux macos
+     variant: str  # default freethreaded
+     implementation: str  # cpython graalpy pyodide pypy
+     arch: str  # aarch64 wasm32 x86_64
+     libc: str  # gnu musl none
+
+
+ ##
+
+
  class UvInterpProvider(InterpProvider):
      def __init__(
              self,
@@ -40,3 +70,9 @@ class UvInterpProvider(InterpProvider):
 
      async def get_installed_version(self, version: InterpVersion) -> Interp:
          raise NotImplementedError
+
+     # async def get_installable_versions(self, spec: InterpSpecifier) -> ta.Sequence[InterpVersion]:
+     #     return []
+
+     # async def install_version(self, version: InterpVersion) -> Interp:
+     #     raise TypeError
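
The new `UvPythonListOutput` dataclass describes entries from `uv python list --output-format=json`, the command added to the module docstring. A minimal sketch of fetching that output, assuming the JSON keys mirror the dataclass field names, which this diff does not itself confirm:

```python
# Sketch: shell out to uv and inspect the listed interpreters. Field names are
# assumed to mirror UvPythonListOutput; adjust if uv's JSON schema differs.
import json
import subprocess

proc = subprocess.run(
    ['uv', 'python', 'list', '--output-format=json'],
    check=True,
    capture_output=True,
)
for entry in json.loads(proc.stdout):
    print(entry.get('key'), entry.get('version'), entry.get('path'))
```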
omdev/manifests/main.py CHANGED
@@ -5,7 +5,7 @@ import multiprocessing as mp
  import os.path
 
  from omlish.lite.json import json_dumps_pretty
- from omlish.logs.standard import configure_standard_logging
+ from omlish.logs.std.standard import configure_standard_logging
 
  from .building import ManifestBuilder
  from .building import check_package_manifests
omdev/markdown/incparse.py CHANGED
@@ -114,3 +114,395 @@ class IncrementalMarkdownParser:
              adjusted.append(token)
 
          return adjusted
+
+
+ ##
+
+
+ class ClaudeIncrementalMarkdownParser:
+     # @omlish-llm-author "claude-opus-4-5"
+
+     def __init__(
+             self,
+             *,
+             parser: ta.Optional['md.MarkdownIt'] = None,
+     ) -> None:
+         super().__init__()
+
+         if parser is None:
+             parser = md.MarkdownIt()
+         self._parser = parser
+
+         self._stable_tokens: list[md.token.Token] = []
+         self._buffer = ''
+         self._num_stable_lines = 0
+
+     class FeedOutput(ta.NamedTuple):
+         stable: ta.Sequence['md.token.Token']
+         new_stable: ta.Sequence['md.token.Token']
+         unstable: ta.Sequence['md.token.Token']
+
+     def feed2(self, chunk: str) -> FeedOutput:
+         self._buffer += chunk
+
+         new_tokens = self._parser.parse(self._buffer)
+
+         adjusted_tokens = self._adjust_token_line_numbers(new_tokens, self._num_stable_lines)
+
+         stable_count = self._find_stable_token_count(adjusted_tokens, self._buffer)
+
+         newly_stable: ta.Sequence[md.token.Token]
+         if stable_count > 0:
+             newly_stable = adjusted_tokens[:stable_count]
+
+             max_line = 0
+             for token in newly_stable:
+                 if token.map:
+                     max_line = max(max_line, token.map[1])
+
+             if max_line > self._num_stable_lines:
+                 lines_to_remove = max_line - self._num_stable_lines
+                 lines = self._buffer.split('\n')
+                 self._buffer = '\n'.join(lines[lines_to_remove:])
+
+             self._stable_tokens.extend(newly_stable)
+             self._num_stable_lines = max_line
+
+         else:
+             newly_stable = ()
+
+         return ClaudeIncrementalMarkdownParser.FeedOutput(
+             stable=self._stable_tokens,
+             new_stable=newly_stable,
+             unstable=adjusted_tokens[stable_count:],
+         )
+
+     def feed(self, chunk: str) -> list['md.token.Token']:
+         out = self.feed2(chunk)
+         return [*out.stable, *out.unstable]
+
+     def _find_stable_token_count(
+             self,
+             tokens: list['md.token.Token'],
+             buffer: str,
+     ) -> int:
+         if not tokens:
+             return 0
+
+         parent_indices = []
+         for i, token in enumerate(tokens):
+             if token.nesting in (1, 0) and token.level == 0:
+                 parent_indices.append(i)
+
+         if len(parent_indices) < 2:
+             return 0
+
+         # Find the last parent index that is fully terminated. We need at least one more parent after it to consider it
+         # stable.
+         buffer_lines = buffer.split('\n')
+
+         for candidate_idx in range(len(parent_indices) - 2, -1, -1):
+             token_list_idx = parent_indices[candidate_idx]
+
+             # Find the end of this block (either the token itself or its closing tag)
+             block_end_idx = token_list_idx
+             if tokens[token_list_idx].nesting == 1:
+                 # Opening tag - find corresponding close
+                 depth = 1
+                 for j in range(token_list_idx + 1, len(tokens)):
+                     if tokens[j].level == 0:
+                         if tokens[j].nesting == 1:
+                             depth += 1
+                         elif tokens[j].nesting == -1:
+                             depth -= 1
+                             if depth == 0:
+                                 block_end_idx = j
+                                 break
+
+             # Get the line range for this block
+             end_line = 0
+             for t in tokens[:block_end_idx + 1]:
+                 if t.map:
+                     end_line = max(end_line, t.map[1])
+
+             # Check if followed by blank line or another clear block boundary. end_line is exclusive (points to line
+             # after the block). Relative to current buffer (not absolute).
+             relative_end = end_line - self._num_stable_lines
+
+             if relative_end < 0:
+                 continue
+
+             if relative_end < len(buffer_lines):
+                 # Check for blank line or clear termination
+                 if relative_end < len(buffer_lines):
+                     following_content = '\n'.join(buffer_lines[relative_end:])
+                     # Stable if: blank line follows, or significant content after
+                     if (
+                             following_content.startswith('\n') or
+                             (following_content.strip() and len(following_content.strip()) > 0)
+                     ):
+                         # Check the next parent token exists and has been parsed
+                         if candidate_idx + 1 < len(parent_indices):
+                             next_parent_idx = parent_indices[candidate_idx + 1]
+                             next_token = tokens[next_parent_idx]
+                             # The next block should start after our block ends
+                             if next_token.map and next_token.map[0] >= end_line - self._num_stable_lines:
+                                 return parent_indices[candidate_idx + 1]
+
+         return 0
+
+     def _adjust_token_line_numbers(
+             self,
+             tokens: list['md.token.Token'],
+             line_offset: int,
+     ) -> list['md.token.Token']:
+         adjusted = []
+         for token in tokens:
+             if token.map:
+                 token = dc.replace(
+                     token,
+                     map=[token.map[0] + line_offset, token.map[1] + line_offset],
+                 )
+
+             adjusted.append(token)
+
+         return adjusted
+
+
+ class GptIncrementalMarkdownParser:
+     # @omlish-llm-author "gpt-5.2"
+
+     def __init__(
+             self,
+             *,
+             parser: ta.Optional['md.MarkdownIt'] = None,
+     ) -> None:
+         super().__init__()
+
+         if parser is None:
+             parser = md.MarkdownIt()
+         self._parser = parser
+
+         self._stable_tokens: list[md.token.Token] = []
+         self._buffer = ''
+         self._num_stable_lines = 0  # Number of *source* lines removed from the buffer and committed.
+
+     class FeedOutput(ta.NamedTuple):
+         stable: ta.Sequence['md.token.Token']
+         new_stable: ta.Sequence['md.token.Token']
+         unstable: ta.Sequence['md.token.Token']
+
+     def feed2(self, chunk: str) -> FeedOutput:
+         self._buffer += chunk
+
+         # Parse the current buffer (line numbers are relative to the buffer's start).
+         new_tokens = self._parser.parse(self._buffer)
+
+         # Adjust ALL tokens to account for stable lines from previous parses.
+         adjusted_tokens = self._adjust_token_line_numbers(new_tokens, self._num_stable_lines)
+
+         # Decide how many *source lines* from the front of the buffer are safe to commit permanently.
+         stable_line_cut = self._find_stable_line_cut(self._buffer)
+         stable_abs_line = self._num_stable_lines + stable_line_cut
+
+         newly_stable: ta.Sequence[md.token.Token]
+         if stable_line_cut > 0:
+             # Commit tokens that are wholly before the stable cut.
+             newly_stable_list: list[md.token.Token] = []
+             remaining_list: list[md.token.Token] = []
+
+             for t in adjusted_tokens:
+                 # Tokens without maps are treated conservatively as unstable unless we've already committed
+                 # all remaining source.
+                 if not t.map:
+                     remaining_list.append(t)
+                     continue
+
+                 # t.map is [start_line, end_line) in absolute source lines (after adjustment).
+                 if t.map[1] <= stable_abs_line:
+                     newly_stable_list.append(t)
+                 else:
+                     remaining_list.append(t)
+
+             newly_stable = newly_stable_list
+
+             # Remove committed source lines from the buffer.
+             lines = self._buffer.split('\n')
+             self._buffer = '\n'.join(lines[stable_line_cut:])
+
+             # Persist committed state.
+             self._stable_tokens.extend(newly_stable)
+             self._num_stable_lines = stable_abs_line
+
+             unstable = remaining_list
+
+         else:
+             newly_stable = ()
+             unstable = adjusted_tokens
+
+         return GptIncrementalMarkdownParser.FeedOutput(
+             stable=self._stable_tokens,
+             new_stable=newly_stable,
+             unstable=unstable,
+         )
+
+     def feed(self, chunk: str) -> list['md.token.Token']:
+         out = self.feed2(chunk)
+         return [*out.stable, *out.unstable]
+
+     ##
+     # Stability boundary
+
+     def _find_stable_line_cut(self, buf: str) -> int:
+         """
+         Return a conservative number of *source lines* from the buffer start that can be treated as permanently stable
+         (i.e. future suffixes of the markdown source will not change their parse/render).
+
+         This intentionally errs on the side of keeping more in the unstable tail.
+         """
+
+         if not buf:
+             return 0
+
+         lines = buf.split('\n')
+
+         # Track whether we're inside a fenced code block. This is the biggest retroactive-parse hazard.
+         in_fence = False
+         fence_marker: str | None = None
+
+         # Track whether we're inside a blockquote region (heuristic).
+         in_quote = False
+
+         # Track whether we're inside a list region (heuristic).
+         in_list = False
+
+         # We only commit up to a "hard" boundary: a blank line that is outside fence/quote/list context. Additionally,
+         # we require that the boundary line itself is blank (so setext headings can't reach back).
+         last_safe_cut: int = 0
+
+         def is_blank(s: str) -> bool:
+             return not s.strip()
+
+         def is_fence_line(s: str) -> str | None:
+             st = s.lstrip()
+             if st.startswith('```'):
+                 return '```'
+             if st.startswith('~~~'):
+                 return '~~~'
+             return None
+
+         def is_quote_line(s: str) -> bool:
+             return s.lstrip().startswith('>')
+
+         def is_list_line(s: str) -> bool:
+             st = s.lstrip()
+             if not st:
+                 return False
+             # Very conservative list marker detection.
+             if st[0] in ('-', '*', '+') and len(st) > 1 and st[1].isspace():
+                 return True
+             # "1. " / "1) "
+             i = 0
+             while i < len(st) and st[i].isdigit():
+                 i += 1
+             if i > 0 and i < len(st) and st[i] in ('.', ')'):
+                 j = i + 1
+                 return j < len(st) and st[j].isspace()
+             return False
+
+         def is_indented_code(s: str) -> bool:
+             # Indented code blocks (4 spaces / 1 tab) can be sensitive to context; treat as "unstable context" for
+             # committing boundaries.
+             return s.startswith(('    ', '\t'))
+
+         for i, line in enumerate(lines):
+             # Fence tracking.
+             fm = is_fence_line(line)
+             if fm is not None:
+                 if not in_fence:
+                     in_fence = True
+                     fence_marker = fm
+                 else:
+                     # Only close on the matching marker (conservative).
+                     if fence_marker == fm:
+                         in_fence = False
+                         fence_marker = None
+
+             # Quote tracking (heuristic: treat contiguous quote lines as quote context).
+             if is_quote_line(line):
+                 in_quote = True
+             elif is_blank(line):
+                 # A blank line is a potential place to end a quote, but only if we are not in a fence.
+                 if not in_fence:
+                     in_quote = False
+
+             # List tracking (heuristic: any list marker enters list context; blank lines end list context only if the
+             # following non-blank line is not indented / not list / not quote).
+             if is_list_line(line):
+                 in_list = True
+             if is_blank(line) and not in_fence:
+                 # Peek ahead to see if the list plausibly continues.
+                 j = i + 1
+                 while j < len(lines) and is_blank(lines[j]):
+                     j += 1
+                 if j >= len(lines):
+                     # End of buffer: keep tail unstable.
+                     pass
+                 else:
+                     nxt = lines[j]
+                     if (
+                             not is_indented_code(nxt) and
+                             not is_list_line(nxt) and
+                             not is_quote_line(nxt)
+                     ):
+                         in_list = False
+
+             # Commit boundary selection.
+             if is_blank(line) and not in_fence and not in_quote and not in_list:
+                 # Safe to commit through this blank line (i.e. cut after it).
+                 last_safe_cut = i + 1
+
+         # Never cut the entire buffer; leave at least one line in the tail so incremental feeds keep working.
+         if last_safe_cut >= len(lines):
+             return 0
+
+         return last_safe_cut
+
+     def _adjust_token_line_numbers(
+             self,
+             tokens: list['md.token.Token'],
+             line_offset: int,
+     ) -> list['md.token.Token']:
+         adjusted: list[md.token.Token] = []
+
+         def adj_tok(t: 'md.token.Token') -> 'md.token.Token':
+             nt = t
+             if nt.map:
+                 nt = dc.replace(
+                     nt,
+                     map=[nt.map[0] + line_offset, nt.map[1] + line_offset],
+                 )
+
+             # Adjust children maps too (markdown-it uses children for inline tokens).
+             ch = getattr(nt, 'children', None)
+             if ch:
+                 new_children: list[md.token.Token] = []
+                 changed = False
+                 for c in ch:
+                     nc = c
+                     if nc.map:
+                         nc = dc.replace(
+                             nc,
+                             map=[nc.map[0] + line_offset, nc.map[1] + line_offset],
+                         )
+                         changed = True
+                     new_children.append(nc)
+                 if changed:
+                     nt = dc.replace(nt, children=new_children)
+
+             return nt
+
+         for token in tokens:
+             adjusted.append(adj_tok(token))
+
+         return adjusted
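
Both incremental parsers added above share the same `feed2` / `feed` surface: `feed2` returns the tokens already committed as stable, the tokens newly committed by this chunk, and a still-revisable tail. A minimal usage sketch, assuming markdown-it-py is installed and the module is importable as `omdev.markdown.incparse` (path taken from the file list above); the chunk contents are arbitrary example input:

```python
# Sketch: stream chunks through one of the incremental parsers and watch which
# tokens have become stable versus which may still be revised by later input.
from omdev.markdown.incparse import GptIncrementalMarkdownParser

p = GptIncrementalMarkdownParser()
for chunk in ['# Title\n\nSome par', 'agraph text.\n\n- item one\n- item two\n\nDone.\n']:
    out = p.feed2(chunk)
    print(len(out.stable), len(out.new_stable), len(out.unstable))
```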
omdev/packaging/revisions.py CHANGED
@@ -15,7 +15,7 @@ import zipfile
  from omlish.lite.cached import cached_nullary
  from omlish.lite.check import check
  from omlish.logs.modules import get_module_logger
- from omlish.logs.standard import configure_standard_logging
+ from omlish.logs.std.standard import configure_standard_logging
 
  from ..git.revisions import get_git_revision
  from .wheelfile import WheelFile