Sphinx 8.1.3__py3-none-any.whl → 8.2.0rc1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of Sphinx might be problematic.
- sphinx/__init__.py +8 -4
- sphinx/__main__.py +2 -0
- sphinx/_cli/__init__.py +2 -5
- sphinx/_cli/util/colour.py +34 -11
- sphinx/_cli/util/errors.py +128 -61
- sphinx/addnodes.py +51 -35
- sphinx/application.py +362 -230
- sphinx/builders/__init__.py +87 -64
- sphinx/builders/_epub_base.py +65 -56
- sphinx/builders/changes.py +17 -23
- sphinx/builders/dirhtml.py +8 -13
- sphinx/builders/epub3.py +70 -38
- sphinx/builders/gettext.py +93 -73
- sphinx/builders/html/__init__.py +240 -186
- sphinx/builders/html/_assets.py +9 -2
- sphinx/builders/html/_build_info.py +3 -0
- sphinx/builders/latex/__init__.py +64 -54
- sphinx/builders/latex/constants.py +14 -11
- sphinx/builders/latex/nodes.py +2 -0
- sphinx/builders/latex/theming.py +8 -9
- sphinx/builders/latex/transforms.py +7 -5
- sphinx/builders/linkcheck.py +193 -149
- sphinx/builders/manpage.py +17 -17
- sphinx/builders/singlehtml.py +28 -16
- sphinx/builders/texinfo.py +28 -21
- sphinx/builders/text.py +10 -15
- sphinx/builders/xml.py +10 -19
- sphinx/cmd/build.py +49 -119
- sphinx/cmd/make_mode.py +35 -31
- sphinx/cmd/quickstart.py +78 -62
- sphinx/config.py +265 -163
- sphinx/directives/__init__.py +51 -54
- sphinx/directives/admonitions.py +107 -0
- sphinx/directives/code.py +24 -19
- sphinx/directives/other.py +21 -42
- sphinx/directives/patches.py +28 -16
- sphinx/domains/__init__.py +54 -31
- sphinx/domains/_domains_container.py +22 -17
- sphinx/domains/_index.py +5 -8
- sphinx/domains/c/__init__.py +366 -245
- sphinx/domains/c/_ast.py +378 -256
- sphinx/domains/c/_ids.py +89 -31
- sphinx/domains/c/_parser.py +283 -214
- sphinx/domains/c/_symbol.py +269 -198
- sphinx/domains/changeset.py +39 -24
- sphinx/domains/citation.py +54 -24
- sphinx/domains/cpp/__init__.py +517 -362
- sphinx/domains/cpp/_ast.py +999 -682
- sphinx/domains/cpp/_ids.py +133 -65
- sphinx/domains/cpp/_parser.py +746 -588
- sphinx/domains/cpp/_symbol.py +692 -489
- sphinx/domains/index.py +10 -8
- sphinx/domains/javascript.py +152 -74
- sphinx/domains/math.py +48 -40
- sphinx/domains/python/__init__.py +402 -211
- sphinx/domains/python/_annotations.py +114 -57
- sphinx/domains/python/_object.py +151 -67
- sphinx/domains/rst.py +94 -49
- sphinx/domains/std/__init__.py +510 -249
- sphinx/environment/__init__.py +345 -61
- sphinx/environment/adapters/asset.py +7 -1
- sphinx/environment/adapters/indexentries.py +15 -20
- sphinx/environment/adapters/toctree.py +19 -9
- sphinx/environment/collectors/__init__.py +3 -1
- sphinx/environment/collectors/asset.py +18 -15
- sphinx/environment/collectors/dependencies.py +8 -10
- sphinx/environment/collectors/metadata.py +6 -4
- sphinx/environment/collectors/title.py +3 -1
- sphinx/environment/collectors/toctree.py +4 -4
- sphinx/errors.py +1 -3
- sphinx/events.py +4 -4
- sphinx/ext/apidoc/__init__.py +21 -0
- sphinx/ext/apidoc/__main__.py +9 -0
- sphinx/ext/apidoc/_cli.py +356 -0
- sphinx/ext/apidoc/_generate.py +356 -0
- sphinx/ext/apidoc/_shared.py +66 -0
- sphinx/ext/autodoc/__init__.py +829 -480
- sphinx/ext/autodoc/directive.py +57 -21
- sphinx/ext/autodoc/importer.py +184 -67
- sphinx/ext/autodoc/mock.py +25 -10
- sphinx/ext/autodoc/preserve_defaults.py +17 -9
- sphinx/ext/autodoc/type_comment.py +56 -29
- sphinx/ext/autodoc/typehints.py +49 -26
- sphinx/ext/autosectionlabel.py +28 -11
- sphinx/ext/autosummary/__init__.py +271 -143
- sphinx/ext/autosummary/generate.py +121 -51
- sphinx/ext/coverage.py +152 -91
- sphinx/ext/doctest.py +169 -101
- sphinx/ext/duration.py +12 -6
- sphinx/ext/extlinks.py +33 -21
- sphinx/ext/githubpages.py +8 -8
- sphinx/ext/graphviz.py +175 -109
- sphinx/ext/ifconfig.py +11 -6
- sphinx/ext/imgconverter.py +48 -25
- sphinx/ext/imgmath.py +127 -97
- sphinx/ext/inheritance_diagram.py +177 -103
- sphinx/ext/intersphinx/__init__.py +22 -13
- sphinx/ext/intersphinx/__main__.py +3 -1
- sphinx/ext/intersphinx/_cli.py +18 -14
- sphinx/ext/intersphinx/_load.py +91 -82
- sphinx/ext/intersphinx/_resolve.py +108 -74
- sphinx/ext/intersphinx/_shared.py +2 -2
- sphinx/ext/linkcode.py +28 -12
- sphinx/ext/mathjax.py +60 -29
- sphinx/ext/napoleon/__init__.py +19 -7
- sphinx/ext/napoleon/docstring.py +229 -231
- sphinx/ext/todo.py +44 -49
- sphinx/ext/viewcode.py +105 -57
- sphinx/extension.py +3 -1
- sphinx/highlighting.py +13 -7
- sphinx/io.py +9 -13
- sphinx/jinja2glue.py +29 -26
- sphinx/locale/__init__.py +8 -9
- sphinx/parsers.py +8 -7
- sphinx/project.py +2 -2
- sphinx/pycode/__init__.py +31 -21
- sphinx/pycode/ast.py +6 -3
- sphinx/pycode/parser.py +14 -8
- sphinx/pygments_styles.py +4 -5
- sphinx/registry.py +192 -92
- sphinx/roles.py +58 -7
- sphinx/search/__init__.py +75 -54
- sphinx/search/en.py +11 -13
- sphinx/search/fi.py +1 -1
- sphinx/search/ja.py +8 -6
- sphinx/search/nl.py +1 -1
- sphinx/search/zh.py +19 -21
- sphinx/testing/fixtures.py +26 -29
- sphinx/testing/path.py +26 -62
- sphinx/testing/restructuredtext.py +14 -8
- sphinx/testing/util.py +21 -19
- sphinx/texinputs/make.bat.jinja +50 -50
- sphinx/texinputs/sphinx.sty +4 -3
- sphinx/texinputs/sphinxlatexadmonitions.sty +1 -1
- sphinx/texinputs/sphinxlatexobjects.sty +29 -10
- sphinx/themes/basic/static/searchtools.js +8 -5
- sphinx/theming.py +49 -61
- sphinx/transforms/__init__.py +17 -38
- sphinx/transforms/compact_bullet_list.py +5 -3
- sphinx/transforms/i18n.py +8 -21
- sphinx/transforms/post_transforms/__init__.py +142 -93
- sphinx/transforms/post_transforms/code.py +5 -5
- sphinx/transforms/post_transforms/images.py +28 -24
- sphinx/transforms/references.py +3 -1
- sphinx/util/__init__.py +109 -60
- sphinx/util/_files.py +39 -23
- sphinx/util/_importer.py +4 -1
- sphinx/util/_inventory_file_reader.py +76 -0
- sphinx/util/_io.py +2 -2
- sphinx/util/_lines.py +6 -3
- sphinx/util/_pathlib.py +40 -2
- sphinx/util/build_phase.py +2 -0
- sphinx/util/cfamily.py +19 -14
- sphinx/util/console.py +44 -179
- sphinx/util/display.py +9 -10
- sphinx/util/docfields.py +140 -122
- sphinx/util/docstrings.py +1 -1
- sphinx/util/docutils.py +118 -77
- sphinx/util/fileutil.py +25 -26
- sphinx/util/http_date.py +2 -0
- sphinx/util/i18n.py +77 -64
- sphinx/util/images.py +8 -6
- sphinx/util/inspect.py +147 -38
- sphinx/util/inventory.py +215 -116
- sphinx/util/logging.py +33 -33
- sphinx/util/matching.py +12 -4
- sphinx/util/nodes.py +18 -13
- sphinx/util/osutil.py +38 -39
- sphinx/util/parallel.py +22 -13
- sphinx/util/parsing.py +2 -1
- sphinx/util/png.py +6 -2
- sphinx/util/requests.py +33 -2
- sphinx/util/rst.py +3 -2
- sphinx/util/tags.py +1 -1
- sphinx/util/template.py +18 -10
- sphinx/util/texescape.py +8 -6
- sphinx/util/typing.py +148 -122
- sphinx/versioning.py +3 -3
- sphinx/writers/html.py +3 -1
- sphinx/writers/html5.py +61 -50
- sphinx/writers/latex.py +80 -65
- sphinx/writers/manpage.py +19 -38
- sphinx/writers/texinfo.py +44 -45
- sphinx/writers/text.py +48 -30
- sphinx/writers/xml.py +11 -8
- {sphinx-8.1.3.dist-info → sphinx-8.2.0rc1.dist-info}/LICENSE.rst +1 -1
- {sphinx-8.1.3.dist-info → sphinx-8.2.0rc1.dist-info}/METADATA +23 -15
- {sphinx-8.1.3.dist-info → sphinx-8.2.0rc1.dist-info}/RECORD +190 -186
- {sphinx-8.1.3.dist-info → sphinx-8.2.0rc1.dist-info}/WHEEL +1 -1
- sphinx/builders/html/transforms.py +0 -90
- sphinx/ext/apidoc.py +0 -721
- sphinx/util/exceptions.py +0 -74
- {sphinx-8.1.3.dist-info → sphinx-8.2.0rc1.dist-info}/entry_points.txt +0 -0
sphinx/util/console.py
CHANGED
@@ -2,71 +2,44 @@
 
 from __future__ import annotations
 
-import os
-import re
 import shutil
-import sys
-from typing import TYPE_CHECKING
 
[lines 11-40 removed; not rendered in this diff view]
-try:
-    # check if colorama is installed to support color on Windows
-    import colorama
-
-    COLORAMA_AVAILABLE = True
-except ImportError:
-    COLORAMA_AVAILABLE = False
-
-_CSI: Final[str] = re.escape('\x1b[')  # 'ESC [': Control Sequence Introducer
-
-# Pattern matching ANSI control sequences containing colors.
-_ansi_color_re: Final[re.Pattern[str]] = re.compile(r'\x1b\[(?:\d+;){0,2}\d*m')
-
-_ansi_re: Final[re.Pattern[str]] = re.compile(
-    _CSI
-    + r"""
-    (?:
-      (?:\d+;){0,2}\d*m  # ANSI color code ('m' is equivalent to '0m')
-    |
-      [012]?K  # ANSI Erase in Line ('K' is equivalent to '0K')
-    )""",
-    re.VERBOSE | re.ASCII,
+import sphinx._cli.util.colour
+from sphinx._cli.util.colour import (  # NoQA: F401
+    _create_input_mode_colour_func,
+    black,
+    blink,
+    blue,
+    bold,
+    brown,
+    colourise,
+    darkblue,
+    darkgray,
+    darkgreen,
+    darkred,
+    disable_colour,
+    enable_colour,
+    faint,
+    fuchsia,
+    green,
+    lightgray,
+    purple,
+    red,
+    reset,
+    standout,
+    teal,
+    terminal_supports_colour,
+    turquoise,
+    underline,
+    white,
+    yellow,
 )
-
+from sphinx._cli.util.errors import strip_escape_sequences
 
[lines 66-69 removed; not rendered in this diff view]
+color_terminal = terminal_supports_colour
+nocolor = disable_colour
+coloron = enable_colour
+strip_colors = strip_escape_sequences
 
 
 def terminal_safe(s: str) -> str:
@@ -83,7 +56,7 @@ _tw: int = get_terminal_width()
 
 
 def term_width_line(text: str) -> str:
-    if …
+    if sphinx._cli.util.colour._COLOURING_DISABLED:
         # if no coloring, don't output fancy backspaces
         return text + '\n'
     else:
@@ -91,121 +64,13 @@ def term_width_line(text: str) -> str:
         return text.ljust(_tw + len(text) - len(strip_escape_sequences(text))) + '\r'
 
 
-def color_terminal() -> bool:
-    if 'NO_COLOR' in os.environ:
-        return False
-    if sys.platform == 'win32' and COLORAMA_AVAILABLE:
-        colorama.just_fix_windows_console()
-        return True
-    if 'FORCE_COLOR' in os.environ:
-        return True
-    if not hasattr(sys.stdout, 'isatty'):
-        return False
-    if not sys.stdout.isatty():
-        return False
-    if 'COLORTERM' in os.environ:
-        return True
-    term = os.environ.get('TERM', 'dumb').lower()
-    return term in ('xterm', 'linux') or 'color' in term
-
-
-def nocolor() -> None:
-    if sys.platform == 'win32' and COLORAMA_AVAILABLE:
-        colorama.deinit()
-    codes.clear()
-
-
-def coloron() -> None:
-    codes.update(_orig_codes)
-
-
 def colorize(name: str, text: str, input_mode: bool = False) -> str:
[lines 123-131 removed; not rendered in this diff view]
-        else:
-            return escape
-
-    return escseq(name) + text + escseq('reset')
-
-
-def strip_colors(s: str) -> str:
-    """Remove the ANSI color codes in a string *s*.
-
-    .. caution::
-
-       This function is not meant to be used in production and should only
-       be used for testing Sphinx's output messages.
-
-    .. seealso:: :func:`strip_escape_sequences`
-    """
-    return _ansi_color_re.sub('', s)
-
-
-def strip_escape_sequences(text: str, /) -> str:
-    r"""Remove the ANSI CSI colors and "erase in line" sequences.
-
-    Other `escape sequences`__ (e.g., VT100-specific functions) are not
-    supported and only control sequences *natively* known to Sphinx (i.e.,
-    colors declared in this module and "erase entire line" (``'\x1b[2K'``))
-    are eliminated by this function.
-
-    .. caution::
-
-       This function is not meant to be used in production and should only
-       be used for testing Sphinx's output messages that were not tempered
-       with by third-party extensions.
-
-    .. versionadded:: 7.3
-
-       This function is added as an *experimental* feature.
-
-    __ https://en.wikipedia.org/wiki/ANSI_escape_code
-    """
-    return _ansi_re.sub('', text)
-
-
-def create_color_func(name: str) -> None:
-    def inner(text: str) -> str:
-        return colorize(name, text)
-
-    globals()[name] = inner
-
-
-_attrs = {
-    'reset': '39;49;00m',
-    'bold': '01m',
-    'faint': '02m',
-    'standout': '03m',
-    'underline': '04m',
-    'blink': '05m',
-}
-
-for __name, __value in _attrs.items():
-    codes[__name] = '\x1b[' + __value
-
-_colors = [
-    ('black', 'darkgray'),
-    ('darkred', 'red'),
-    ('darkgreen', 'green'),
-    ('brown', 'yellow'),
-    ('darkblue', 'blue'),
-    ('purple', 'fuchsia'),
-    ('turquoise', 'teal'),
-    ('lightgray', 'white'),
-]
-
-for __i, (__dark, __light) in enumerate(_colors, 30):
-    codes[__dark] = '\x1b[%im' % __i
-    codes[__light] = '\x1b[%im' % (__i + 60)
-
-_orig_codes = codes.copy()
-
-for _name in codes:
-    create_color_func(_name)
+    if input_mode:
+        colour_func = globals()[name]
+        escape_code = getattr(colour_func, '__escape_code', '')
+        if not escape_code:
+            return colour_func(text)
+        inner = _create_input_mode_colour_func(escape_code)
+        return inner(text)
+
+    return colourise(name, text)
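Taken together, the console.py rewrite keeps the module's historical public names but turns them into thin wrappers over the new sphinx._cli.util.colour module: color_terminal, nocolor, coloron and strip_colors become aliases, and colorize() forwards to colourise(). A minimal sketch of how existing callers keep working, assuming Sphinx 8.2.0rc1 is installed (the calling code below is illustrative, not taken from the diff):

# Hedged sketch: legacy sphinx.util.console names, now re-exported from
# sphinx._cli.util.colour (this usage is assumed for illustration, not Sphinx's own code).
from sphinx.util.console import blue, bold, color_terminal, colorize, nocolor

if not color_terminal():  # alias for terminal_supports_colour()
    nocolor()             # alias for disable_colour()

print(bold('building') + ' ' + blue('[html]'))
print(colorize('darkgreen', 'done'))  # colorize() delegates to colourise()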
sphinx/util/display.py
CHANGED
@@ -2,16 +2,15 @@ from __future__ import annotations
 
 import functools
 
+from sphinx._cli.util.colour import bold, terminal_supports_colour
 from sphinx.locale import __
 from sphinx.util import logging
-from sphinx.util.console import bold, color_terminal
 
[line 9 removed; not rendered in this diff view]
+TYPE_CHECKING = False
+if TYPE_CHECKING:
     from collections.abc import Callable, Iterable, Iterator
     from types import TracebackType
-    from typing import Any, TypeVar
-
-    from typing_extensions import ParamSpec
+    from typing import Any, ParamSpec, TypeVar
 
 T = TypeVar('T')
 P = ParamSpec('P')
@@ -37,12 +36,12 @@ def status_iterator(
     stringify_func: Callable[[Any], str] = display_chunk,
 ) -> Iterator[T]:
     # printing on a single line requires ANSI control sequences
-    single_line = verbosity < 1 and …
+    single_line = verbosity < 1 and terminal_supports_colour()
     bold_summary = bold(summary)
     if length == 0:
         logger.info(bold_summary, nonl=True)
         for item in iterable:
-            logger.info(stringify_func(item) …
+            logger.info('%s ', stringify_func(item), nonl=True, color=color)
             yield item
     else:
         for i, item in enumerate(iterable, start=1):
@@ -80,14 +79,14 @@ class progress_message:
     ) -> bool:
         prefix = '' if self.nonl else bold(self.message + ': ')
         if isinstance(val, SkipProgressMessage):
-            logger.info(prefix + __('skipped'))
+            logger.info(prefix + __('skipped'))  # NoQA: G003
            if val.args:
                logger.info(*val.args)
            return True
        elif val:
-            logger.info(prefix + __('failed'))
+            logger.info(prefix + __('failed'))  # NoQA: G003
        else:
-            logger.info(prefix + __('done'))
+            logger.info(prefix + __('done'))  # NoQA: G003
 
        return False
 
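Besides swapping color_terminal() for terminal_supports_colour(), display.py now guards its typing-only imports with a module-level TYPE_CHECKING flag that is plainly False at runtime, so those imports are never executed when Sphinx runs. A small self-contained sketch of that idiom (the module below is illustrative, not Sphinx code):

# Hedged sketch of the lazy-typing idiom adopted in the new display.py.
TYPE_CHECKING = False  # False at runtime; checkers that special-case the name still analyse the block
if TYPE_CHECKING:
    from collections.abc import Iterable  # never imported when the module runs


def total_length(items: 'Iterable[str]') -> int:
    # string annotation, so the runtime never needs the Iterable import
    return sum(len(item) for item in items)


print(total_length(['spam', 'ham']))  # prints 8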
sphinx/util/docfields.py
CHANGED
@@ -7,10 +7,9 @@ be domain-specifically transformed to a more appealing presentation.
 from __future__ import annotations
 
 import contextlib
-from typing import TYPE_CHECKING, …
+from typing import TYPE_CHECKING, cast
 
 from docutils import nodes
-from docutils.nodes import Element, Node
 
 from sphinx import addnodes
 from sphinx.locale import __
@@ -18,12 +17,20 @@ from sphinx.util import logging
 from sphinx.util.nodes import get_node_line
 
 if TYPE_CHECKING:
+    from typing import TypeAlias, TypeVar
+
+    from docutils.nodes import Element, Node
     from docutils.parsers.rst.states import Inliner
 
     from sphinx.directives import ObjectDescription
     from sphinx.environment import BuildEnvironment
     from sphinx.util.typing import TextlikeNode
 
+    ObjDescT = TypeVar('ObjDescT')
+    _FieldEntry: TypeAlias = tuple[str, list[Node]]
+    _FieldTypes: TypeAlias = dict[str, list[Node]]
+    _EntriesTriple: TypeAlias = tuple['Field', _FieldEntry | list[_FieldEntry], Element]
+
 logger = logging.getLogger(__name__)
 
 
@@ -131,14 +138,14 @@ class Field:
         )
     ]
 
-    def make_entry(self, fieldarg: str, content: list[Node]) -> …
-        return …
+    def make_entry(self, fieldarg: str, content: list[Node]) -> _FieldEntry:
+        return fieldarg, content
 
     def make_field(
         self,
-        types: …
+        types: _FieldTypes,
         domain: str,
-        item: …
+        item: _FieldEntry,
         env: BuildEnvironment | None = None,
         inliner: Inliner | None = None,
         location: Element | None = None,
@@ -181,8 +188,7 @@ class Field:
 
 
 class GroupedField(Field):
-    """
-    A doc field that is grouped; i.e., all fields of that type will be
+    """A doc field that is grouped; i.e., all fields of that type will be
     transformed into one field with its body being a bulleted list. It always
     has an argument. The argument can be linked using the given *rolename*.
     GroupedField should be used for doc fields that can occur more than once.
@@ -210,9 +216,9 @@ class GroupedField(Field):
 
     def make_field(
         self,
-        types: …
+        types: _FieldTypes,
         domain: str,
-        items: …
+        items: list[_FieldEntry],  # type: ignore[override]
         env: BuildEnvironment | None = None,
         inliner: Inliner | None = None,
         location: Element | None = None,
@@ -237,7 +243,7 @@ class GroupedField(Field):
             listnode += nodes.list_item('', par)
 
         if len(items) == 1 and self.can_collapse:
-            list_item = cast(nodes.list_item, listnode[0])
+            list_item = cast('nodes.list_item', listnode[0])
             fieldbody = nodes.field_body('', list_item[0])
             return nodes.field('', fieldname, fieldbody)
 
@@ -246,8 +252,7 @@ class GroupedField(Field):
 
 
 class TypedField(GroupedField):
-    """
-    A doc field that is grouped and has type information for the arguments. It
+    """A doc field that is grouped and has type information for the arguments. It
     always has an argument. The argument can be linked using the given
     *rolename*, the type using the given *typerolename*.
 
@@ -283,9 +288,9 @@ class TypedField(GroupedField):
 
     def make_field(
         self,
-        types: …
+        types: _FieldTypes,
         domain: str,
-        items: …
+        items: list[_FieldEntry],  # type: ignore[override]
         env: BuildEnvironment | None = None,
         inliner: Inliner | None = None,
         location: Element | None = None,
@@ -338,14 +343,13 @@ class TypedField(GroupedField):
 
 
 class DocFieldTransformer:
-    """
-    Transforms field lists in "doc field" syntax into better-looking
+    """Transforms field lists in "doc field" syntax into better-looking
     equivalents, using the field type definitions given on a domain.
     """
 
     typemap: dict[str, tuple[Field, bool]]
 
-    def __init__(self, directive: ObjectDescription) -> None:
+    def __init__(self, directive: ObjectDescription[ObjDescT]) -> None:
         self.directive = directive
 
         self.typemap = directive.get_field_type_map()
@@ -359,115 +363,129 @@ class DocFieldTransformer:
 
     def transform(self, node: nodes.field_list) -> None:
         """Transform a single field list *node*."""
[lines 362-363 removed; not rendered in this diff view]
-        entries: list[nodes.field | tuple[Field, Any, Element]] = []
+        entries: list[nodes.field | _EntriesTriple] = []
         groupindices: dict[str, int] = {}
-        types: dict[str, …
+        types: dict[str, _FieldTypes] = {}
 
         # step 1: traverse all fields and collect field types and content
-        for field in cast(list[nodes.field], node):
[lines 370-372 removed; not rendered in this diff view]
+        for field in cast('list[nodes.field]', node):
+            self._transform_step_1(field, entries, types, groupindices)
+
+        new_list = self._transform_step_2(entries, types)
+        node.replace_self(new_list)
+
+    def _transform_step_1(
+        self,
+        field: nodes.field,
+        entries: list[nodes.field | _EntriesTriple],
+        types: dict[str, _FieldTypes],
+        group_indices: dict[str, int],
+    ) -> None:
+        assert len(field) == 2
+        field_name = cast('nodes.field_name', field[0])
+        field_body = cast('nodes.field_body', field[1])
+        try:
+            # split into field type and argument
+            fieldtype_name, fieldarg = field_name.astext().split(None, 1)
+        except ValueError:
+            # maybe an argument-less field type?
+            fieldtype_name, fieldarg = field_name.astext(), ''
+        typedesc, is_typefield = self.typemap.get(fieldtype_name, (None, None))
+
+        # collect the content, trying not to keep unnecessary paragraphs
+        if _is_single_paragraph(field_body):
+            paragraph = cast('nodes.paragraph', field_body[0])
+            content = paragraph.children
+        else:
+            content = field_body.children
+
+        # sort out unknown fields
+        if typedesc is None or typedesc.has_arg != bool(fieldarg):
+            # either the field name is unknown, or the argument doesn't
+            # match the spec; capitalize field name and be done with it
+            new_fieldname = fieldtype_name[0:1].upper() + fieldtype_name[1:]
+            if fieldarg:
+                new_fieldname += ' ' + fieldarg
+            field_name[0] = nodes.Text(new_fieldname)
+            entries.append(field)
+
+            # but if this has a type then we can at least link it
+            if (
+                typedesc
+                and is_typefield
+                and content
+                and len(content) == 1
+                and isinstance(content[0], nodes.Text)
+            ):
+                typed_field = cast('TypedField', typedesc)
+                target = content[0].astext()
+                xrefs = typed_field.make_xrefs(
+                    typed_field.typerolename,
+                    self.directive.domain or '',
+                    target,
+                    contnode=content[0],
+                    env=self.directive.env,
+                )
+                if _is_single_paragraph(field_body):
+                    paragraph = cast('nodes.paragraph', field_body[0])
+                    paragraph.clear()
+                    paragraph.extend(xrefs)
+                else:
+                    field_body.clear()
+                    field_body += nodes.paragraph('', '', *xrefs)
+
+            return
+
+        typename = typedesc.name
+
+        # if the field specifies a type, put it in the types collection
+        if is_typefield:
+            # filter out only inline nodes; others will result in invalid
+            # markup being written out
+            content = [n for n in content if isinstance(n, nodes.Inline | nodes.Text)]
+            if content:
+                types.setdefault(typename, {})[fieldarg] = content
+            return
+
+        # also support syntax like ``:param type name:``
+        if typedesc.is_typed:
             try:
[line 374 removed; not rendered in this diff view]
-                fieldtype_name, fieldarg = field_name.astext().split(None, 1)
+                argtype, argname = fieldarg.rsplit(None, 1)
             except ValueError:
[line 377 removed; not rendered in this diff view]
-                fieldtype_name, fieldarg = field_name.astext(), ''
-            typedesc, is_typefield = typemap.get(fieldtype_name, (None, None))
-
-            # collect the content, trying not to keep unnecessary paragraphs
-            if _is_single_paragraph(field_body):
-                paragraph = cast(nodes.paragraph, field_body[0])
-                content = paragraph.children
+                pass
             else:
[lines 386-402 removed; not rendered in this diff view]
-                    and len(content) == 1
-                    and isinstance(content[0], nodes.Text)
-                ):
-                    typed_field = cast(TypedField, typedesc)
-                    target = content[0].astext()
-                    xrefs = typed_field.make_xrefs(
-                        typed_field.typerolename,
-                        self.directive.domain or '',
-                        target,
-                        contnode=content[0],
-                        env=self.directive.state.document.settings.env,
-                    )
-                    if _is_single_paragraph(field_body):
-                        paragraph = cast(nodes.paragraph, field_body[0])
-                        paragraph.clear()
-                        paragraph.extend(xrefs)
-                    else:
-                        field_body.clear()
-                        field_body += nodes.paragraph('', '', *xrefs)
-
-                continue
-
-            typename = typedesc.name
-
-            # if the field specifies a type, put it in the types collection
-            if is_typefield:
-                # filter out only inline nodes; others will result in invalid
-                # markup being written out
-                content = [
-                    n for n in content if isinstance(n, nodes.Inline | nodes.Text)
-                ]
-                if content:
-                    types.setdefault(typename, {})[fieldarg] = content
-                continue
-
-            # also support syntax like ``:param type name:``
-            if typedesc.is_typed:
-                try:
-                    argtype, argname = fieldarg.rsplit(None, 1)
-                except ValueError:
-                    pass
-                else:
-                    types.setdefault(typename, {})[argname] = [nodes.Text(argtype)]
-                    fieldarg = argname
-
-            translatable_content = nodes.inline(field_body.rawsource, translatable=True)
-            translatable_content.document = field_body.parent.document
-            translatable_content.source = field_body.parent.source
-            translatable_content.line = field_body.parent.line
-            translatable_content += content
-
-            # grouped entries need to be collected in one entry, while others
-            # get one entry per field
-            if typedesc.is_grouped:
-                if typename in groupindices:
-                    group = cast(
-                        tuple[Field, list, Node], entries[groupindices[typename]]
-                    )
-                else:
-                    groupindices[typename] = len(entries)
-                    group = (typedesc, [], field)
-                    entries.append(group)
-                new_entry = typedesc.make_entry(fieldarg, [translatable_content])
-                group[1].append(new_entry)
+                types.setdefault(typename, {})[argname] = [nodes.Text(argtype)]
+                fieldarg = argname
+
+        translatable_content = nodes.inline(field_body.rawsource, translatable=True)
+        translatable_content.document = field_body.parent.document
+        translatable_content.source = field_body.parent.source
+        translatable_content.line = field_body.parent.line
+        translatable_content += content
+
+        # grouped entries need to be collected in one entry, while others
+        # get one entry per field
+        if typedesc.is_grouped:
+            if typename in group_indices:
+                group = cast(
+                    'tuple[Field, list[_FieldEntry], Node]',
+                    entries[group_indices[typename]],
+                )
             else:
[lines 468-469 removed; not rendered in this diff view]
+                group_indices[typename] = len(entries)
+                group = (typedesc, [], field)
+                entries.append(group)
+            new_entry = typedesc.make_entry(fieldarg, [translatable_content])
+            group[1].append(new_entry)
+        else:
+            new_entry = typedesc.make_entry(fieldarg, [translatable_content])
+            entries.append((typedesc, new_entry, field))
 
+    def _transform_step_2(
+        self,
+        entries: list[nodes.field | _EntriesTriple],
+        types: dict[str, _FieldTypes],
+    ) -> nodes.field_list:
         # step 2: all entries are collected, construct the new field list
         new_list = nodes.field_list()
         for entry in entries:
@@ -477,16 +495,16 @@ class DocFieldTransformer:
             else:
                 fieldtype, items, location = entry
                 fieldtypes = types.get(fieldtype.name, {})
-                env = self.directive. …
+                env = self.directive.env
                 inliner = self.directive.state.inliner
                 domain = self.directive.domain or ''
                 new_list += fieldtype.make_field(
                     fieldtypes,
                     domain,
-                    items,
+                    items,  # type: ignore[arg-type]
                     env=env,
                     inliner=inliner,
                     location=location,
                 )
 
[line 492 removed; not rendered in this diff view]
+        return new_list
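For orientation, the classes changed above are the machinery behind :param:-style field lists: a domain directive lists Field, GroupedField and TypedField instances in its doc_field_types, and DocFieldTransformer (now split into _transform_step_1 and _transform_step_2) rewrites the matching fields when the directive runs. A hedged sketch of such a declaration, with an invented directive and invented field names purely for illustration:

# Hedged sketch: a custom ObjectDescription subclass declaring doc fields.
# RecipeDirective and its field names are made up; only the Field/GroupedField/
# TypedField constructors come from sphinx.util.docfields.
from sphinx.directives import ObjectDescription
from sphinx.util.docfields import Field, GroupedField, TypedField


class RecipeDirective(ObjectDescription[str]):
    doc_field_types = [
        TypedField(
            'ingredient',
            label='Ingredients',
            names=('ingredient',),
            typenames=('ingtype',),
            can_collapse=True,
        ),
        GroupedField('step', label='Steps', names=('step',)),
        Field('duration', label='Duration', has_arg=False, names=('duration',)),
    ]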
sphinx/util/docstrings.py
CHANGED