Sphinx 8.1.3__py3-none-any.whl → 8.2.0rc1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sphinx/__init__.py +8 -4
- sphinx/__main__.py +2 -0
- sphinx/_cli/__init__.py +2 -5
- sphinx/_cli/util/colour.py +34 -11
- sphinx/_cli/util/errors.py +128 -61
- sphinx/addnodes.py +51 -35
- sphinx/application.py +362 -230
- sphinx/builders/__init__.py +87 -64
- sphinx/builders/_epub_base.py +65 -56
- sphinx/builders/changes.py +17 -23
- sphinx/builders/dirhtml.py +8 -13
- sphinx/builders/epub3.py +70 -38
- sphinx/builders/gettext.py +93 -73
- sphinx/builders/html/__init__.py +240 -186
- sphinx/builders/html/_assets.py +9 -2
- sphinx/builders/html/_build_info.py +3 -0
- sphinx/builders/latex/__init__.py +64 -54
- sphinx/builders/latex/constants.py +14 -11
- sphinx/builders/latex/nodes.py +2 -0
- sphinx/builders/latex/theming.py +8 -9
- sphinx/builders/latex/transforms.py +7 -5
- sphinx/builders/linkcheck.py +193 -149
- sphinx/builders/manpage.py +17 -17
- sphinx/builders/singlehtml.py +28 -16
- sphinx/builders/texinfo.py +28 -21
- sphinx/builders/text.py +10 -15
- sphinx/builders/xml.py +10 -19
- sphinx/cmd/build.py +49 -119
- sphinx/cmd/make_mode.py +35 -31
- sphinx/cmd/quickstart.py +78 -62
- sphinx/config.py +265 -163
- sphinx/directives/__init__.py +51 -54
- sphinx/directives/admonitions.py +107 -0
- sphinx/directives/code.py +24 -19
- sphinx/directives/other.py +21 -42
- sphinx/directives/patches.py +28 -16
- sphinx/domains/__init__.py +54 -31
- sphinx/domains/_domains_container.py +22 -17
- sphinx/domains/_index.py +5 -8
- sphinx/domains/c/__init__.py +366 -245
- sphinx/domains/c/_ast.py +378 -256
- sphinx/domains/c/_ids.py +89 -31
- sphinx/domains/c/_parser.py +283 -214
- sphinx/domains/c/_symbol.py +269 -198
- sphinx/domains/changeset.py +39 -24
- sphinx/domains/citation.py +54 -24
- sphinx/domains/cpp/__init__.py +517 -362
- sphinx/domains/cpp/_ast.py +999 -682
- sphinx/domains/cpp/_ids.py +133 -65
- sphinx/domains/cpp/_parser.py +746 -588
- sphinx/domains/cpp/_symbol.py +692 -489
- sphinx/domains/index.py +10 -8
- sphinx/domains/javascript.py +152 -74
- sphinx/domains/math.py +48 -40
- sphinx/domains/python/__init__.py +402 -211
- sphinx/domains/python/_annotations.py +114 -57
- sphinx/domains/python/_object.py +151 -67
- sphinx/domains/rst.py +94 -49
- sphinx/domains/std/__init__.py +510 -249
- sphinx/environment/__init__.py +345 -61
- sphinx/environment/adapters/asset.py +7 -1
- sphinx/environment/adapters/indexentries.py +15 -20
- sphinx/environment/adapters/toctree.py +19 -9
- sphinx/environment/collectors/__init__.py +3 -1
- sphinx/environment/collectors/asset.py +18 -15
- sphinx/environment/collectors/dependencies.py +8 -10
- sphinx/environment/collectors/metadata.py +6 -4
- sphinx/environment/collectors/title.py +3 -1
- sphinx/environment/collectors/toctree.py +4 -4
- sphinx/errors.py +1 -3
- sphinx/events.py +4 -4
- sphinx/ext/apidoc/__init__.py +21 -0
- sphinx/ext/apidoc/__main__.py +9 -0
- sphinx/ext/apidoc/_cli.py +356 -0
- sphinx/ext/apidoc/_generate.py +356 -0
- sphinx/ext/apidoc/_shared.py +66 -0
- sphinx/ext/autodoc/__init__.py +829 -480
- sphinx/ext/autodoc/directive.py +57 -21
- sphinx/ext/autodoc/importer.py +184 -67
- sphinx/ext/autodoc/mock.py +25 -10
- sphinx/ext/autodoc/preserve_defaults.py +17 -9
- sphinx/ext/autodoc/type_comment.py +56 -29
- sphinx/ext/autodoc/typehints.py +49 -26
- sphinx/ext/autosectionlabel.py +28 -11
- sphinx/ext/autosummary/__init__.py +271 -143
- sphinx/ext/autosummary/generate.py +121 -51
- sphinx/ext/coverage.py +152 -91
- sphinx/ext/doctest.py +169 -101
- sphinx/ext/duration.py +12 -6
- sphinx/ext/extlinks.py +33 -21
- sphinx/ext/githubpages.py +8 -8
- sphinx/ext/graphviz.py +175 -109
- sphinx/ext/ifconfig.py +11 -6
- sphinx/ext/imgconverter.py +48 -25
- sphinx/ext/imgmath.py +127 -97
- sphinx/ext/inheritance_diagram.py +177 -103
- sphinx/ext/intersphinx/__init__.py +22 -13
- sphinx/ext/intersphinx/__main__.py +3 -1
- sphinx/ext/intersphinx/_cli.py +18 -14
- sphinx/ext/intersphinx/_load.py +91 -82
- sphinx/ext/intersphinx/_resolve.py +108 -74
- sphinx/ext/intersphinx/_shared.py +2 -2
- sphinx/ext/linkcode.py +28 -12
- sphinx/ext/mathjax.py +60 -29
- sphinx/ext/napoleon/__init__.py +19 -7
- sphinx/ext/napoleon/docstring.py +229 -231
- sphinx/ext/todo.py +44 -49
- sphinx/ext/viewcode.py +105 -57
- sphinx/extension.py +3 -1
- sphinx/highlighting.py +13 -7
- sphinx/io.py +9 -13
- sphinx/jinja2glue.py +29 -26
- sphinx/locale/__init__.py +8 -9
- sphinx/parsers.py +8 -7
- sphinx/project.py +2 -2
- sphinx/pycode/__init__.py +31 -21
- sphinx/pycode/ast.py +6 -3
- sphinx/pycode/parser.py +14 -8
- sphinx/pygments_styles.py +4 -5
- sphinx/registry.py +192 -92
- sphinx/roles.py +58 -7
- sphinx/search/__init__.py +75 -54
- sphinx/search/en.py +11 -13
- sphinx/search/fi.py +1 -1
- sphinx/search/ja.py +8 -6
- sphinx/search/nl.py +1 -1
- sphinx/search/zh.py +19 -21
- sphinx/testing/fixtures.py +26 -29
- sphinx/testing/path.py +26 -62
- sphinx/testing/restructuredtext.py +14 -8
- sphinx/testing/util.py +21 -19
- sphinx/texinputs/make.bat.jinja +50 -50
- sphinx/texinputs/sphinx.sty +4 -3
- sphinx/texinputs/sphinxlatexadmonitions.sty +1 -1
- sphinx/texinputs/sphinxlatexobjects.sty +29 -10
- sphinx/themes/basic/static/searchtools.js +8 -5
- sphinx/theming.py +49 -61
- sphinx/transforms/__init__.py +17 -38
- sphinx/transforms/compact_bullet_list.py +5 -3
- sphinx/transforms/i18n.py +8 -21
- sphinx/transforms/post_transforms/__init__.py +142 -93
- sphinx/transforms/post_transforms/code.py +5 -5
- sphinx/transforms/post_transforms/images.py +28 -24
- sphinx/transforms/references.py +3 -1
- sphinx/util/__init__.py +109 -60
- sphinx/util/_files.py +39 -23
- sphinx/util/_importer.py +4 -1
- sphinx/util/_inventory_file_reader.py +76 -0
- sphinx/util/_io.py +2 -2
- sphinx/util/_lines.py +6 -3
- sphinx/util/_pathlib.py +40 -2
- sphinx/util/build_phase.py +2 -0
- sphinx/util/cfamily.py +19 -14
- sphinx/util/console.py +44 -179
- sphinx/util/display.py +9 -10
- sphinx/util/docfields.py +140 -122
- sphinx/util/docstrings.py +1 -1
- sphinx/util/docutils.py +118 -77
- sphinx/util/fileutil.py +25 -26
- sphinx/util/http_date.py +2 -0
- sphinx/util/i18n.py +77 -64
- sphinx/util/images.py +8 -6
- sphinx/util/inspect.py +147 -38
- sphinx/util/inventory.py +215 -116
- sphinx/util/logging.py +33 -33
- sphinx/util/matching.py +12 -4
- sphinx/util/nodes.py +18 -13
- sphinx/util/osutil.py +38 -39
- sphinx/util/parallel.py +22 -13
- sphinx/util/parsing.py +2 -1
- sphinx/util/png.py +6 -2
- sphinx/util/requests.py +33 -2
- sphinx/util/rst.py +3 -2
- sphinx/util/tags.py +1 -1
- sphinx/util/template.py +18 -10
- sphinx/util/texescape.py +8 -6
- sphinx/util/typing.py +148 -122
- sphinx/versioning.py +3 -3
- sphinx/writers/html.py +3 -1
- sphinx/writers/html5.py +61 -50
- sphinx/writers/latex.py +80 -65
- sphinx/writers/manpage.py +19 -38
- sphinx/writers/texinfo.py +44 -45
- sphinx/writers/text.py +48 -30
- sphinx/writers/xml.py +11 -8
- {sphinx-8.1.3.dist-info → sphinx-8.2.0rc1.dist-info}/LICENSE.rst +1 -1
- {sphinx-8.1.3.dist-info → sphinx-8.2.0rc1.dist-info}/METADATA +23 -15
- {sphinx-8.1.3.dist-info → sphinx-8.2.0rc1.dist-info}/RECORD +190 -186
- {sphinx-8.1.3.dist-info → sphinx-8.2.0rc1.dist-info}/WHEEL +1 -1
- sphinx/builders/html/transforms.py +0 -90
- sphinx/ext/apidoc.py +0 -721
- sphinx/util/exceptions.py +0 -74
- {sphinx-8.1.3.dist-info → sphinx-8.2.0rc1.dist-info}/entry_points.txt +0 -0
sphinx/search/__init__.py
CHANGED
@@ -6,27 +6,44 @@ import dataclasses
 import functools
 import html
 import json
+import os
 import pickle
 import re
 from importlib import import_module
-from
-from typing import IO, TYPE_CHECKING, Any
+from typing import TYPE_CHECKING

 from docutils import nodes
-from docutils.nodes import Element
+from docutils.nodes import Element

 from sphinx import addnodes, package_dir
+from sphinx.util._pathlib import _StrPath
 from sphinx.util.index_entries import split_index_msg

 if TYPE_CHECKING:
     from collections.abc import Callable, Iterable
+    from typing import Any, Protocol, TypeVar
+
+    from docutils.nodes import Node

     from sphinx.environment import BuildEnvironment

+    _T_co = TypeVar('_T_co', covariant=True)
+    _T_contra = TypeVar('_T_contra', contravariant=True)
+
+    class _ReadableStream(Protocol[_T_co]):
+        def read(self, n: int = ..., /) -> _T_co: ...
+        def readline(self, n: int = ..., /) -> _T_co: ...
+
+    class _WritableStream(Protocol[_T_contra]):
+        def write(self, s: _T_contra, /) -> object: ...
+
+
+_NON_MINIFIED_JS_PATH = package_dir.joinpath('search', 'non-minified-js')
+_MINIFIED_JS_PATH = package_dir.joinpath('search', 'minified-js')
+

 class SearchLanguage:
-    """
-    This class is the base class for search natural language preprocessors. If
+    """This class is the base class for search natural language preprocessors. If
     you want to add support for a new language, you should override the methods
     of this class.

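The `_ReadableStream` and `_WritableStream` protocols introduced above replace the old `IO` annotations with structural types. As a minimal self-contained sketch (illustrative only, not the Sphinx code itself), any object with a compatible `write` method satisfies such a protocol without inheriting from it:

from io import StringIO
from typing import Protocol, TypeVar

_T_contra = TypeVar('_T_contra', contravariant=True)

class _WritableStream(Protocol[_T_contra]):
    def write(self, s: _T_contra, /) -> object: ...

def dump_index(payload: str, f: _WritableStream[str]) -> None:
    # any object exposing a matching .write() is accepted; no inheritance needed
    f.write(payload)

buf = StringIO()
dump_index('Search.setIndex({})', buf)  # StringIO structurally satisfies the protocol
print(buf.getvalue())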
@@ -78,21 +95,17 @@ var Stemmer = function() {
         self.init(options)

     def init(self, options: dict[str, str]) -> None:
-        """
-        Initialize the class with the options the user has given.
-        """
+        """Initialize the class with the options the user has given."""

     def split(self, input: str) -> list[str]:
-        """
-        This method splits a sentence into words. Default splitter splits input
+        """This method splits a sentence into words. Default splitter splits input
         at white spaces, which should be enough for most languages except CJK
         languages.
         """
         return self._word_re.findall(input)

     def stem(self, word: str) -> str:
-        """
-        This method implements stemming algorithm of the Python version.
+        """This method implements stemming algorithm of the Python version.

         Default implementation does nothing. You should implement this if the
         language has any stemming rules.
@@ -104,8 +117,7 @@ var Stemmer = function() {
         return word

     def word_filter(self, word: str) -> bool:
-        """
-        Return true if the target word should be registered in the search index.
+        """Return true if the target word should be registered in the search index.
         This method is called after stemming.
         """
         return len(word) == 0 or not (
@@ -119,8 +131,7 @@ from sphinx.search.en import SearchEnglish  # NoQA: E402


 def parse_stop_word(source: str) -> set[str]:
-    """
-    Parse snowball style word list like this:
+    """Parse snowball style word list like this:

     * https://snowball.tartarus.org/algorithms/finnish/stop.txt
     """
@@ -154,8 +165,7 @@ languages: dict[str, str | type[SearchLanguage]] = {


 class _JavaScriptIndex:
-    """
-    The search index as JavaScript file that calls a function
+    """The search index as JavaScript file that calls a function
     on the documentation search object to register the index.
     """

@@ -163,7 +173,8 @@ class _JavaScriptIndex:
     SUFFIX = ')'

     def dumps(self, data: Any) -> str:
-
+        data_json = json.dumps(data, separators=(',', ':'), sort_keys=True)
+        return self.PREFIX + data_json + self.SUFFIX

     def loads(self, s: str) -> Any:
         data = s[len(self.PREFIX) : -len(self.SUFFIX)]
@@ -172,10 +183,10 @@ class _JavaScriptIndex:
             raise ValueError(msg)
         return json.loads(data)

-    def dump(self, data: Any, f:
+    def dump(self, data: Any, f: _WritableStream[str]) -> None:
         f.write(self.dumps(data))

-    def load(self, f:
+    def load(self, f: _ReadableStream[str]) -> Any:
         return self.loads(f.read())


@@ -203,20 +214,21 @@ class WordStore:


 class WordCollector(nodes.NodeVisitor):
-    """
-    A special visitor that collects words for the `IndexBuilder`.
-    """
+    """A special visitor that collects words for the `IndexBuilder`."""

     def __init__(self, document: nodes.document, lang: SearchLanguage) -> None:
         super().__init__(document)
         self.found_words: list[str] = []
-        self.found_titles: list[tuple[str, str]] = []
+        self.found_titles: list[tuple[str, str | None]] = []
         self.found_title_words: list[str] = []
         self.lang = lang

     def dispatch_visit(self, node: Node) -> None:
         if isinstance(node, nodes.comment):
             raise nodes.SkipNode
+        elif isinstance(node, nodes.Element) and 'no-search' in node['classes']:
+            # skip nodes marked with a 'no-search' class
+            raise nodes.SkipNode
         elif isinstance(node, nodes.raw):
             if 'html' in node.get('format', '').split():
                 # Some people might put content in raw HTML that should be searched,
@@ -241,8 +253,10 @@ class WordCollector(nodes.NodeVisitor):
             self.found_words.extend(self.lang.split(node.astext()))
         elif isinstance(node, nodes.title):
             title = node.astext()
-            ids
-
+            if ids := node.parent['ids']:
+                self.found_titles.append((title, ids[0]))
+            else:
+                self.found_titles.append((title, None))
             self.found_title_words.extend(self.lang.split(title))
         elif isinstance(node, Element) and _is_meta_keywords(node, self.lang.lang):  # type: ignore[arg-type]
             keywords = node['content']
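The new `no-search` branch in `dispatch_visit` skips any element carrying that class. A small illustrative sketch of the check (assuming docutils is installed; this mirrors the condition above but is not the Sphinx visitor itself):

from docutils import nodes

def is_excluded_from_search(node: nodes.Node) -> bool:
    # comments and elements marked with the 'no-search' class are skipped
    if isinstance(node, nodes.comment):
        return True
    return isinstance(node, nodes.Element) and 'no-search' in node['classes']

para = nodes.paragraph(text='internal notes')
para['classes'].append('no-search')
print(is_excluded_from_search(para))  # True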
@@ -251,8 +265,7 @@ class WordCollector(nodes.NodeVisitor):


 class IndexBuilder:
-    """
-    Helper class that creates a search index based on the doctrees
+    """Helper class that creates a search index based on the doctrees
     passed to the `feed` method.
     """

@@ -264,7 +277,8 @@ class IndexBuilder:
     def __init__(
         self, env: BuildEnvironment, lang: str, options: dict[str, str], scoring: str
     ) -> None:
-        self.
+        self._domains = env.domains
+        self._env_version = env.version
         # docname -> title
         self._titles: dict[str, str | None] = env._search_index_titles
         # docname -> filename
@@ -309,13 +323,16 @@ class IndexBuilder:
         self.js_scorer_code = ''
         self.js_splitter_code = ''

-    def load(self, stream:
+    def load(self, stream: _ReadableStream[str | bytes], format: Any) -> None:
         """Reconstruct from frozen data."""
         if isinstance(format, str):
             format = self.formats[format]
         frozen = format.load(stream)
         # if an old index is present, we treat it as not existing.
-        if
+        if (
+            not isinstance(frozen, dict)
+            or frozen.get('envversion') != self._env_version
+        ):
             msg = 'old format'
             raise ValueError(msg)
         index2fn = frozen['docnames']
@@ -342,7 +359,9 @@ class IndexBuilder:
         self._title_mapping = load_terms(frozen['titleterms'])
         # no need to load keywords/objtypes

-    def dump(
+    def dump(
+        self, stream: _WritableStream[str] | _WritableStream[bytes], format: Any
+    ) -> None:
         """Dump the frozen index to a stream."""
         if isinstance(format, str):
             format = self.formats[format]
@@ -354,7 +373,7 @@ class IndexBuilder:
         rv: dict[str, list[tuple[int, int, int, str, str]]] = {}
         otypes = self._objtypes
         onames = self._objnames
-        for domain in self.
+        for domain in self._domains.sorted():
             sorted_objects = sorted(domain.get_objects())
             for fullname, dispname, type, docname, anchor, prio in sorted_objects:
                 if docname not in fn2index:
@@ -392,12 +411,12 @@ class IndexBuilder:
     def get_terms(
         self, fn2index: dict[str, int]
     ) -> tuple[dict[str, list[int] | int], dict[str, list[int] | int]]:
-        """
-        Return a mapping of document and title terms to their corresponding sorted document IDs.
+        """Return a mapping of document and title terms to sorted document IDs.

-        When a term is only found within a single document,
-
-
+        When a term is only found within a single document,
+        then the value for that term will be an integer value.
+        When a term is found within multiple documents,
+        the value will be a list of integers.
         """
         rvs: tuple[dict[str, list[int] | int], dict[str, list[int] | int]] = ({}, {})
         for rv, mapping in zip(rvs, (self._mapping, self._title_mapping), strict=True):
@@ -444,7 +463,7 @@ class IndexBuilder:
             'objtypes': objtypes,
             'objnames': objnames,
             'titleterms': title_terms,
-            'envversion': self.
+            'envversion': self._env_version,
             'alltitles': alltitles,
             'indexentries': index_entries,
         }
@@ -471,11 +490,15 @@ class IndexBuilder:
             wordnames.intersection_update(docnames)

     def feed(
-        self,
+        self,
+        docname: str,
+        filename: str | os.PathLike[str],
+        title: str,
+        doctree: nodes.document,
     ) -> None:
         """Feed a doctree to the index."""
         self._titles[docname] = title
-        self._filenames[docname] = filename
+        self._filenames[docname] = os.fspath(filename)

         word_store = self._word_collector(doctree)

@@ -546,12 +569,12 @@ class IndexBuilder:
             'search_word_splitter_code': js_splitter_code,
         }

-    def get_js_stemmer_rawcodes(self) -> list[
+    def get_js_stemmer_rawcodes(self) -> list[_StrPath]:
         """Returns a list of non-minified stemmer JS files to copy."""
         if self.lang.js_stemmer_rawcode:
             return [
-
-
+                _StrPath(_NON_MINIFIED_JS_PATH / 'base-stemmer.js'),
+                _StrPath(_NON_MINIFIED_JS_PATH / self.lang.js_stemmer_rawcode),
             ]
         else:
             return []
@@ -562,15 +585,10 @@ class IndexBuilder:
     def get_js_stemmer_code(self) -> str:
         """Returns JS code that will be inserted into language_data.js."""
         if self.lang.js_stemmer_rawcode:
-
-
-
-
-            base_js = js_file.read()
-            with open(
-                path.join(js_dir, self.lang.js_stemmer_rawcode), encoding='utf-8'
-            ) as js_file:
-                language_js = js_file.read()
+            base_js_path = _NON_MINIFIED_JS_PATH / 'base-stemmer.js'
+            language_js_path = _NON_MINIFIED_JS_PATH / self.lang.js_stemmer_rawcode
+            base_js = base_js_path.read_text(encoding='utf-8')
+            language_js = language_js_path.read_text(encoding='utf-8')
             return (
                 f'{base_js}\n{language_js}\nStemmer = {self.lang.language_name}Stemmer;'
             )
@@ -587,6 +605,9 @@ def _feed_visit_nodes(
 ) -> None:
     if isinstance(node, nodes.comment):
         return
+    elif isinstance(node, nodes.Element) and 'no-search' in node['classes']:
+        # skip nodes marked with a 'no-search' class
+        return
     elif isinstance(node, nodes.raw):
         if 'html' in node.get('format', '').split():
             # Some people might put content in raw HTML that should be searched,
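The reworded `get_terms` docstring describes the value encoding of the index mapping: a term occurring in a single document is stored as a bare integer, otherwise as a list. A minimal illustrative sketch of that compression (not the Sphinx implementation):

def compress(term_to_docs: dict[str, list[int]]) -> dict[str, list[int] | int]:
    # a single document id collapses to an int; multiple ids stay a sorted list
    return {
        term: docs[0] if len(docs) == 1 else sorted(docs)
        for term, docs in term_to_docs.items()
    }

print(compress({'sphinx': [0], 'search': [2, 0, 1]}))
# {'sphinx': 0, 'search': [0, 1, 2]}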
sphinx/search/en.py
CHANGED
@@ -6,19 +6,17 @@ import snowballstemmer

 from sphinx.search import SearchLanguage

-english_stopwords =
-
-
-
-
-
-
-
-
-
-
-""".split()
-)
+english_stopwords = {
+    'a', 'and', 'are', 'as', 'at',
+    'be', 'but', 'by',
+    'for',
+    'if', 'in', 'into', 'is', 'it',
+    'near', 'no', 'not',
+    'of', 'on', 'or',
+    'such',
+    'that', 'the', 'their', 'then', 'there', 'these', 'they', 'this', 'to',
+    'was', 'will', 'with',
+}  # fmt: skip

 js_porter_stemmer = """
 /**
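The stop-word list is now written as a set literal rather than a string passed through `.split()`; the words themselves are unchanged. A short illustrative sketch (not Sphinx code) of how such a set is typically consumed when filtering index terms:

english_stopwords = {'a', 'and', 'the', 'of', 'on', 'or'}  # abridged for the example

def keep_word(word: str) -> bool:
    # drop stop words and single-character tokens before indexing
    return len(word) > 1 and word.lower() not in english_stopwords

print([w for w in 'the anatomy of a search index'.split() if keep_word(w)])
# ['anatomy', 'search', 'index']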
sphinx/search/fi.py
CHANGED
sphinx/search/ja.py
CHANGED
@@ -13,7 +13,11 @@ from __future__ import annotations
 import os
 import re
 import sys
-from
+from pathlib import Path
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any

 try:
     import MeCab  # type: ignore[import-not-found]
@@ -39,8 +43,7 @@ class BaseSplitter:
         self.options = options

     def split(self, input: str) -> list[str]:
-        """
-        :param str input:
+        """:param str input:
         :return:
         :rtype: list[str]
         """
@@ -89,7 +92,7 @@ class MecabSplitter(BaseSplitter):
             libpath = ctypes.util.find_library(lib)
         else:
             libpath = None
-        if
+        if Path(lib).exists():
             libpath = lib
         if libpath is None:
             msg = 'MeCab dynamic library is not available'
@@ -513,8 +516,7 @@ class DefaultSplitter(BaseSplitter):


 class SearchJapanese(SearchLanguage):
-    """
-    Japanese search implementation: uses no stemmer, but word splitting is quite
+    """Japanese search implementation: uses no stemmer, but word splitting is quite
     complicated.
     """

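In `MecabSplitter`, the explicit-path fallback now uses `pathlib.Path` instead of `os.path`. A hedged sketch of the overall lookup order (the helper name is hypothetical; the real code raises an error when nothing is found):

import ctypes.util
from pathlib import Path

def find_mecab_library(lib: str) -> str | None:
    # prefer the system library search path, then accept an explicit file path
    libpath = ctypes.util.find_library(lib)
    if libpath is None and Path(lib).exists():
        libpath = lib
    return libpath

print(find_mecab_library('mecab'))  # None unless libmecab is installed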
sphinx/search/nl.py
CHANGED
sphinx/search/zh.py
CHANGED
@@ -2,8 +2,8 @@

 from __future__ import annotations

-import os
 import re
+from pathlib import Path

 import snowballstemmer

@@ -13,22 +13,22 @@ try:
     import jieba  # type: ignore[import-not-found]

     JIEBA = True
+    JIEBA_DEFAULT_DICT = Path(jieba.__file__).parent / jieba.DEFAULT_DICT_NAME
 except ImportError:
     JIEBA = False
-
-
-
-a
-be
-for
-if
-near
-of
-such
-that
-was
-
-)
+    JIEBA_DEFAULT_DICT = Path()
+
+english_stopwords = {
+    'a', 'and', 'are', 'as', 'at',
+    'be', 'but', 'by',
+    'for',
+    'if', 'in', 'into', 'is', 'it',
+    'near', 'no', 'not',
+    'of', 'on', 'or',
+    'such',
+    'that', 'the', 'their', 'then', 'there', 'these', 'they', 'this', 'to',
+    'was', 'will', 'with',
+}  # fmt: skip

 js_porter_stemmer = """
 /**
@@ -218,9 +218,7 @@ iti|ous|ive|ize)$/;


 class SearchChinese(SearchLanguage):
-    """
-    Chinese search implementation
-    """
+    """Chinese search implementation"""

     lang = 'zh'
     language_name = 'Chinese'
@@ -234,8 +232,8 @@ class SearchChinese(SearchLanguage):

     def init(self, options: dict[str, str]) -> None:
         if JIEBA:
-            dict_path = options.get('dict')
-            if dict_path and
+            dict_path = options.get('dict', JIEBA_DEFAULT_DICT)
+            if dict_path and Path(dict_path).is_file():
                 jieba.load_userdict(dict_path)

         self.stemmer = snowballstemmer.stemmer('english')
@@ -260,7 +258,7 @@ class SearchChinese(SearchLanguage):
         stemmed = self.stemmer.stemWord(word.lower())
         should_not_be_stemmed = (
             len(word) >= 3 > len(stemmed) and word in self.latin_terms
-        )
+        )
         if should_not_be_stemmed:
             return word.lower()
         return stemmed
sphinx/testing/fixtures.py
CHANGED
@@ -73,11 +73,10 @@ def app_params(
     request: Any,
     test_params: dict[str, Any],
     shared_result: SharedResult,
-    sphinx_test_tempdir:
-    rootdir: Path,
+    sphinx_test_tempdir: Path,
+    rootdir: Path | None,
 ) -> _app_params:
-    """
-    Parameters that are specified by 'pytest.mark.sphinx' for
+    """Parameters that are specified by 'pytest.mark.sphinx' for
     sphinx.application.Sphinx initialization
     """
     # ##### process pytest.mark.sphinx
@@ -103,24 +102,31 @@ def app_params(

     # ##### prepare Application params

-
-    kwargs['srcdir'] = srcdir = sphinx_test_tempdir / kwargs.get('srcdir',
+    test_root = kwargs.pop('testroot', 'root')
+    kwargs['srcdir'] = srcdir = sphinx_test_tempdir / kwargs.get('srcdir', test_root)
+    copy_test_root = not {'srcdir', 'copy_test_root'}.isdisjoint(kwargs)

     # special support for sphinx/tests
-    if rootdir
-
-
+    if rootdir is not None:
+        test_root_path = rootdir / f'test-{test_root}'
+        if copy_test_root:
+            if test_root_path.is_dir():
+                shutil.copytree(test_root_path, srcdir, dirs_exist_ok=True)
+        else:
+            kwargs['srcdir'] = test_root_path
+
+    # always write to the temporary directory
+    kwargs.setdefault('builddir', srcdir / '_build')

     return _app_params(args, kwargs)


-_app_params = namedtuple('_app_params', 'args,kwargs')
+_app_params = namedtuple('_app_params', 'args,kwargs')  # NoQA: PYI024


 @pytest.fixture
 def test_params(request: Any) -> dict[str, Any]:
-    """
-    Test parameters that are specified by 'pytest.mark.test_params'
+    """Test parameters that are specified by 'pytest.mark.test_params'

     :param Union[str] shared_result:
        If the value is provided, app._status and app._warning objects will be
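For context, the `testroot` value popped above comes from the `pytest.mark.sphinx` marker, and `rootdir / f'test-{testroot}'` selects the matching fixture project. An illustrative test using that marker (the builder name and test root are hypothetical examples, not taken from the diff):

import pytest

@pytest.mark.sphinx('html', testroot='basic')
def test_basic_build(app):
    # 'app' is the SphinxTestApp provided by the fixtures in this module
    app.build()
    assert (app.outdir / 'index.html').exists()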
@@ -148,9 +154,7 @@ def app(
     make_app: Callable[[], SphinxTestApp],
     shared_result: SharedResult,
 ) -> Iterator[SphinxTestApp]:
-    """
-    Provides the 'sphinx.application.Sphinx' object
-    """
+    """Provides the 'sphinx.application.Sphinx' object"""
     args, kwargs = app_params
     app_ = make_app(*args, **kwargs)
     yield app_
@@ -168,24 +172,19 @@ def app(

 @pytest.fixture
 def status(app: SphinxTestApp) -> StringIO:
-    """
-    Back-compatibility for testing with previous @with_app decorator
-    """
+    """Back-compatibility for testing with previous @with_app decorator"""
     return app.status


 @pytest.fixture
 def warning(app: SphinxTestApp) -> StringIO:
-    """
-    Back-compatibility for testing with previous @with_app decorator
-    """
+    """Back-compatibility for testing with previous @with_app decorator"""
     return app.warning


 @pytest.fixture
 def make_app(test_params: dict[str, Any]) -> Iterator[Callable[[], SphinxTestApp]]:
-    """
-    Provides make_app function to initialize SphinxTestApp instance.
+    """Provides make_app function to initialize SphinxTestApp instance.
     if you want to initialize 'app' in your test function. please use this
     instead of using SphinxTestApp class directory.
     """
@@ -222,9 +221,8 @@ def _shared_result_cache() -> None:


 @pytest.fixture
-def if_graphviz_found(app: SphinxTestApp) -> None:
-    """
-    The test will be skipped when using 'if_graphviz_found' fixture and graphviz
+def if_graphviz_found(app: SphinxTestApp) -> None:
+    """The test will be skipped when using 'if_graphviz_found' fixture and graphviz
     dot command is not found.
     """
     graphviz_dot = getattr(app.config, 'graphviz_dot', '')
@@ -246,9 +244,8 @@ def sphinx_test_tempdir(tmp_path_factory: pytest.TempPathFactory) -> Path:


 @pytest.fixture
-def rollback_sysmodules() -> Iterator[None]:
-    """
-    Rollback sys.modules to its value before testing to unload modules
+def rollback_sysmodules() -> Iterator[None]:
+    """Rollback sys.modules to its value before testing to unload modules
     during tests.

     For example, used in test_ext_autosummary.py to permit unloading the