uEdition 1.3.1__py3-none-any.whl → 2.0.0a1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of uEdition might be problematic.
- uedition/__about__.py +2 -1
- uedition/cli/__init__.py +0 -7
- uedition/cli/build.py +75 -68
- uedition/cli/language.py +6 -0
- uedition/cli/serve.py +3 -1
- uedition/cli/update.py +6 -0
- uedition/ext/config.py +9 -175
- uedition/ext/settings.py +131 -0
- uedition/ext/tei/builder.py +2 -3
- uedition/ext/tei/parser.py +91 -107
- uedition/settings.py +16 -7
- {uedition-1.3.1.dist-info → uedition-2.0.0a1.dist-info}/METADATA +9 -4
- uedition-2.0.0a1.dist-info/RECORD +25 -0
- {uedition-1.3.1.dist-info → uedition-2.0.0a1.dist-info}/WHEEL +1 -1
- uedition/cli/check.py +0 -183
- uedition-1.3.1.dist-info/RECORD +0 -25
- {uedition-1.3.1.dist-info → uedition-2.0.0a1.dist-info}/entry_points.txt +0 -0
- {uedition-1.3.1.dist-info → uedition-2.0.0a1.dist-info}/licenses/LICENSE.txt +0 -0
uedition/ext/tei/parser.py
CHANGED
@@ -1,7 +1,9 @@
+# noqa: A005
 # SPDX-FileCopyrightText: 2023-present Mark Hall <mark.hall@work.room3b.eu>
 #
 # SPDX-License-Identifier: MIT
 """TEI parsing extension for Sphinx."""
+
 import re
 from typing import Callable
 
@@ -10,8 +12,10 @@ from lxml import etree
 from sphinx import addnodes
 from sphinx.application import Sphinx
 from sphinx.parsers import Parser as SphinxParser
+from sphinx.util import logging
 from sphinx.writers.html import HTMLWriter
 
+logger = logging.getLogger(__name__)
 namespaces = {"tei": "http://www.tei-c.org/ns/1.0", "uedition": "https://uedition.readthedocs.org"}
 
 
@@ -33,18 +37,18 @@ class TeiElement(nodes.Element):
 def tei_element_html_enter(self: "HTMLWriter", node: TeiElement) -> None:
     """Visit a TeiElement and generate the correct HTML."""
     if node.get("html_tag") is not None:
-        buffer = [f
+        buffer = [f"<{node.get('html_tag')}"]
         if node.get("ids"):
            buffer.append(f' id="{node.get("ids")[0]}"')
         for key, value in node.get("tei_attributes").items():
            buffer.append(f' {key}="{value}"')
-        self.body.append(f'
+        self.body.append(f"{''.join(buffer)}>")
 
 
 def tei_element_html_exit(self: "HTMLWriter", node: TeiElement) -> None:
     """Close the HTML tag."""
     if node.get("html_tag") is not None:
-        self.body.append(f
+        self.body.append(f"</{node.get('html_tag')}>")
 
 
 class TEIParser(SphinxParser):
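The rewritten visitors above assemble the opening tag from the node's `html_tag`, `ids` and `tei_attributes` and emit the matching closing tag on exit. A standalone sketch of the string they produce (not uEdition's own API; the element name, id and `data-tei-block-paragraph` attribute are illustrative):

```python
# Standalone illustration of the HTML string built by the new
# tei_element_html_enter/tei_element_html_exit visitors; names are made up.
def render(html_tag: str, ids: list[str], tei_attributes: dict[str, str], inner: str) -> str:
    buffer = [f"<{html_tag}"]
    if ids:
        buffer.append(f' id="{ids[0]}"')
    for key, value in tei_attributes.items():
        buffer.append(f' {key}="{value}"')
    return f"{''.join(buffer)}>{inner}</{html_tag}>"


print(render("div", ["p1"], {"data-tei-block-paragraph": ""}, "Lorem ipsum"))
# -> <div id="p1" data-tei-block-paragraph="">Lorem ipsum</div>
```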
@@ -54,7 +58,8 @@ class TEIParser(SphinxParser):
     """Specify that only .tei files are parsed"""
 
     def parse(self: "TEIParser", inputstring: str, document: nodes.document) -> None:
-        """
+        """
+        Parse source TEI text.
 
         This function creates the basic structure and then the :func:`~uEdition.extensions.tei.TEIParser._walk_tree`
         function is used to actually process the XML.
@@ -75,43 +80,55 @@ class TEIParser(SphinxParser):
         doc_title = nodes.title()
         doc_title.append(nodes.Text(title if title else "[Untitled]"))
         doc_section.append(doc_title)
-
-
-
+        for conf_section in self.config.tei["sections"]:
+            section = nodes.section(ids=[nodes.make_id(conf_section["name"])])
+            if conf_section["title"]:
                 section_title = nodes.title()
                 section_title.append(nodes.Text(conf_section["title"]))
                 section.append(section_title)
-
-
-
-
-
-
-
-                tmp = nodes.section()
+            if conf_section["type"] == "text":
+                # Process a text section
+                sources = root.xpath(conf_section["selector"], namespaces=namespaces)
+                if len(sources) > 0:
+                    doc_section.append(section)
+                    tmp = nodes.section()
+                    for source in sources:
                        for child in source:
-                            self._walk_tree(child, tmp
-
-
-
+                            self._walk_tree(child, tmp)
+                    self._wrap_sections(section, tmp)
+            elif conf_section["type"] == "textlist":
+                # Process a text section
+                sources = root.xpath(conf_section["selector"], namespaces=namespaces)
+                if len(sources) > 0:
+                    if conf_section["sort"]:
+                        source.sort(key=self._sort_key(conf_section["sort"]))
+                    doc_section.append(section)
+                    for source in sources:
+                        tmp = nodes.section(ids=[source.attrib["id"]])
+                        for child in source:
+                            self._walk_tree(child, tmp)
+                        section.append(tmp)
+                    # self._wrap_sections(section, tmp)
+            elif conf_section["type"] == "metadata":
+                # Process a field or metadata section
+                sources = root.xpath(conf_section["selector"], namespaces=namespaces)
+                if len(sources) > 0:
                    doc_section.append(section)
                    fields = nodes.definition_list()
                    section.append(fields)
                    for field in conf_section["fields"]:
                        if field["type"] == "single":
-                            self._parse_single_field(fields, field,
+                            self._parse_single_field(fields, field, sources[0])
                        elif field["type"] == "list":
-                            self._parse_list_field(fields, field,
+                            self._parse_list_field(fields, field, sources[0])
                        elif field["type"] == "download":
-                            self._parse_download_field(fields, field,
+                            self._parse_download_field(fields, field, sources[0])
         document.append(doc_section)
 
-    def _sort_key(
-        self: "TEIParser", xpath: str
-    ) -> Callable[[etree.Element], tuple[str, ...] | tuple[int, ...] | tuple[None]]:
+    def _sort_key(self: "TEIParser", xpath: str) -> Callable[[etree.Element], tuple[tuple[int, ...], ...]]:
        """Create a sortkey that understands about `page,line` patterns for sorting."""
 
-        def sorter(node: etree.Element) -> tuple[
+        def sorter(node: etree.Element) -> tuple[tuple[int, ...], ...]:
            value = node.xpath(xpath, namespaces=namespaces)
            if value is not None and len(value) > 0:
                if isinstance(value, list):
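The `parse()` method above now builds the document from `self.config.tei["sections"]` instead of a fixed layout. From the keys read in this hunk, each section entry carries a `name`, an optional `title`, a `type` of `text`, `textlist` or `metadata`, an XPath `selector`, a `sort` expression (used by `textlist` sections) and, for `metadata` sections, a list of `fields` with `single`, `list` or `download` entries. A sketch of that shape, with illustrative names and selectors; the authoritative schema lives in the new `uedition/ext/settings.py`, which is not shown in this diff:

```python
# Sketch of the tei["sections"] configuration that parse() iterates over;
# section names, titles and XPath selectors below are illustrative only.
tei_sections = [
    {
        "name": "text",
        "title": "Text",
        "type": "text",                     # or "textlist" / "metadata"
        "selector": "//tei:text/tei:body",  # XPath evaluated against the TEI root
        "sort": None,                       # only consulted for "textlist" sections
        "fields": [],                       # only consulted for "metadata" sections
    },
    {
        "name": "metadata",
        "title": "Metadata",
        "type": "metadata",
        "selector": "//tei:teiHeader",
        "sort": None,
        "fields": [
            {"type": "single", "selector": "tei:fileDesc/tei:titleStmt/tei:title/text()"},
            {"type": "list", "selector": "tei:fileDesc/tei:titleStmt/tei:respStmt"},
            {"type": "download", "selector": "."},
        ],
    },
]
```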
@@ -128,74 +145,57 @@ class TEIParser(SphinxParser):
                    else:
                        order.append(tpl)
                return tuple(order)
-            return (0,)
+            return ((0,),)
 
        return sorter
 
-    def _walk_tree(self: "TEIParser", node: etree.Element, parent: nodes.Element
-        """Walk the XML tree and create the appropriate AST nodes.
+    def _walk_tree(self: "TEIParser", node: etree.Element, parent: nodes.Element) -> None:
+        """Walk the XML tree and create the appropriate AST nodes."""
+        for conf in self.config.tei["blocks"]:
+            if len(node.xpath(f"self::{conf['selector']}", namespaces=namespaces)) > 0:
+                attrs = self._parse_attributes(node, conf["attributes"])
+                attrs.update({f"data-tei-block-{conf['name']}": ""})
+                element = TeiElement(
+                    html_tag=conf["tag"] if conf["tag"] else "div", tei_tag=node.tag, tei_attributes=attrs
+                )
+                for child in node:
+                    self._walk_tree(child, element)
+                parent.append(element)
+                return
+        for conf in self.config.tei["marks"]:
+            if len(node.xpath(f"self::{conf['selector']}", namespaces=namespaces)) > 0:
+                attrs = self._parse_attributes(node, conf["attributes"])
+                attrs.update({f"data-tei-mark-{conf['name']}": ""})
+                element = TeiElement(
+                    html_tag=conf["tag"] if conf["tag"] else "span", tei_tag=node.tag, tei_attributes=attrs
+                )
+                if len(node) == 0 and node.text:
+                    element.append(nodes.Text(node.text))
+                else:
+                    for child in node:
+                        self._walk_tree(child, element)
+                parent.append(element)
+                return
+        if len(node) == 0:
+            parent.append(nodes.Text(node.text))
+        else:
+            logger.warning(f"No block or mark configured for {node.tag}")
 
-
-
-
-
-
-
-
-
-
-        # Loop over the XML node attributes and apply any attribute transforms defined in the matching rule
-        for key, value in node.attrib.items():
-            # Always strip the namespace from the `id` attribute
-            if key == "{http://www.w3.org/XML/1998/namespace}id":
-                key = "id"  # noqa: PLW2901
-            if rule and "attributes" in rule:
-                processed = False
-                for attr_rule in rule["attributes"]:
-                    if attr_rule["action"] == "copy":
-                        if key == attr_rule["source"]:
-                            # Copied attributes are added without a `data-` prefix
-                            attributes[attr_rule["attr"]] = value
-                    elif attr_rule["action"] == "delete":
-                        if key == attr_rule["attr"]:
-                            processed = True
-                    elif attr_rule["action"] == "set":
-                        if key == attr_rule["attr"]:
-                            value = attr_rule["value"]  # noqa: PLW2901
-                # if the attribute did not match any attribute transform
-                if not processed:
-                    # The id attribute is always output as is, all other attributes are prefixed with `data-`
-                    if key == "id":
-                        attributes["id"] = value
-                    else:
-                        attributes[f"data-{key}"] = value
-            else:  # noqa: PLR5501
-                # The id attribute is always output as is, all other attributes are prefixed with `data-`
-                if key == "id":
-                    attributes["id"] = value
+    def _parse_attributes(self, node: etree.Element, attribute_configs: list) -> dict:
+        attrs = {}
+        for conf in attribute_configs:
+            if conf["name"] in node.attrib:
+                if conf["type"] == "id-ref" and node.attrib[conf["name"]].startswith("#"):
+                    attrs[f"data-tei-attribute-{conf['name']}"] = node.attrib[conf["name"]][1:]
+                elif conf["type"] == "static":
+                    attrs[f"data-tei-attribute-{conf['name']}"] = conf["value"]
+                elif conf["type"] == "html-attribute":
+                    attrs[conf["target"]] = node.attrib[conf["name"]]
                else:
-
-
-
-
-            tei_tag=node.tag,
-            tei_attributes=attributes,
-        )
-        parent.append(new_element)
-        if rule is not None and "text" in rule and rule["text"]:
-            # If there is a `text` key in the rule, use that to set the text
-            if rule["text"]["action"] == "from-attribute" and rule["text"]["attr"] in node.attrib:
-                new_element.append(nodes.Text(node.attrib[rule["text"]["attr"]]))
-            elif node.text and (is_leaf or not text_only_in_leaf_nodes):
-                # Only create text content if there is text and we either are in a leaf node or are adding all text
-                new_element.append(nodes.Text(node.text))
-            # Process any children
-            for child in node:
-                self._walk_tree(child, new_element, rules)
-            # If there is text after this XML node and we are adding all text, then add text content to the parent
-            if node.tail and not text_only_in_leaf_nodes:
-                parent.append(nodes.Text(node.tail))
+                    attrs[f"data-tei-attribute-{conf['name']}"] = node.attrib[conf["name"]]
+            elif conf["default"]:
+                attrs[f"data-tei-attribute-{conf['name']}"] = conf["default"]
+        return attrs
 
     def _wrap_sections(self: "TEIParser", section: nodes.Element, tmp: nodes.Element) -> None:
        """Ensure that sections are correctly wrapped."""
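The new `_walk_tree()` above replaces the 1.x rule-and-attribute-transform machinery with two flat lookup tables, `tei["blocks"]` and `tei["marks"]`: the first entry whose `selector` matches the element wins, the element is wrapped in the configured HTML `tag` (falling back to `div` for blocks and `span` for marks), and elements without any match either contribute their text (leaf nodes) or trigger a logger warning. `_parse_attributes()` maps TEI attributes via per-attribute configs of type `id-ref`, `static` or `html-attribute`, falling back to a `data-tei-attribute-*` copy or a `default` value. A sketch of that shape, with illustrative names, tags and selectors (the real schema is defined in `uedition/ext/settings.py`):

```python
# Sketch of the tei["blocks"] / tei["marks"] mapping consumed by _walk_tree()
# and _parse_attributes(); all names, tags and selectors are illustrative.
tei_blocks = [
    {
        "name": "paragraph",   # emitted as data-tei-block-paragraph=""
        "selector": "tei:p",   # matched via self::{selector}
        "tag": "p",            # empty/None falls back to "div"
        "attributes": [],
    },
]
tei_marks = [
    {
        "name": "page-break",  # emitted as data-tei-mark-page-break=""
        "selector": "tei:pb",
        "tag": None,           # empty/None falls back to "span"
        "attributes": [
            {"name": "corresp", "type": "id-ref", "default": ""},              # strips a leading "#"
            {"name": "rend", "type": "static", "value": "pb", "default": ""},  # always emits "pb"
            {"name": "n", "type": "html-attribute", "target": "title", "default": ""},
        ],
    },
]
```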
@@ -213,7 +213,7 @@ class TEIParser(SphinxParser):
                while section_level <= section_stack[-1][0]:
                    section_stack.pop()
                new_section = nodes.section(ids=[nodes.make_id(node.astext())])
-                title = nodes.title()
+                title = nodes.title(attributes={"data-test": ""})
                title.children = node.children
                new_section.append(title)
                section_stack[-1][1].append(new_section)
@@ -239,25 +239,9 @@ class TEIParser(SphinxParser):
            if not in_heading:
                section_stack[-1][1].append(node)
 
-    def _rule_for_node(self: "TEIParser", node: etree.Element, rules: list[dict]) -> dict:
-        """Determine the first matching mapping rule for the node from the configured rules."""
-        tei_tag = node.tag
-        for rule in rules:
-            if rule["selector"]["tag"] == tei_tag:
-                if "attributes" in rule["selector"]:
-                    attr_match = True
-                    for attr_rule in rule["selector"]["attributes"]:
-                        if attr_rule["attr"] not in node.attrib or node.attrib[attr_rule["attr"]] != attr_rule["value"]:
-                            attr_match = False
-                            break
-                    if not attr_match:
-                        continue
-                return rule
-        return None
-
     def _parse_single_field(self: "TEIParser", parent: etree.Element, field: dict, root: etree.Element) -> None:
        """Parse a single metadata field."""
-        content = root.xpath(field["
+        content = root.xpath(field["selector"], namespaces=namespaces)
        if len(content) > 0:
            if isinstance(content, list):
                content = content[0]

uedition/settings.py
CHANGED
@@ -5,6 +5,7 @@
 
 All application settings are accessed via the `settings` dictionary.
 """
+
 import os
 from typing import Annotated, Any, Dict, Tuple, Type
 
@@ -15,11 +16,21 @@ from pydantic_settings import BaseSettings, PydanticBaseSettingsSource
 from yaml import safe_load
 
 
+class NoConfigError(Exception):
+    """Exception to signal that no configuration file was found in the current directory."""
+
+    def __init__(self) -> None:
+        """Initialise the Exception with the default error message."""
+        super().__init__("No uEdition.yml or uEdition.yaml was found in the current directory")
+
+
 class YAMLConfigSettingsSource(PydanticBaseSettingsSource):
     """Loads the configuration settings from a YAML file."""
 
     def get_field_value(
-        self: "YAMLConfigSettingsSource",
+        self: "YAMLConfigSettingsSource",
+        field: FieldInfo,  # noqa: ARG002
+        field_name: str,
     ) -> Tuple[Any, str, bool]:
         """Get the value of a specific field."""
         encoding = self.config.get("env_file_encoding")
@@ -75,8 +86,6 @@ class RepositorySettings(BaseModel):
 
     url: str | None = None
     """The repository's URL."""
-    branch: str | None = None
-    """The repository's branch."""
 
 
 class AuthorSettings(BaseModel):
@@ -107,7 +116,7 @@ def convert_output_str_to_dict(value: str | dict) -> dict:
 class Settings(BaseSettings):
     """Application settings."""
 
-    version: str = "
+    version: str = "2"
     """The configuration file version."""
     author: AuthorSettings = AuthorSettings()
     """The author settings."""
@@ -119,8 +128,8 @@ class Settings(BaseSettings):
     """The repository settings."""
     title: dict = {}
     """The titles for the individual languages."""
-
-    """
+    sphinx_config: dict = {}
+    """Sphinx configuration."""
 
     @classmethod
     def settings_customise_sources(
@@ -147,4 +156,4 @@ settings = Settings().model_dump()
 def reload_settings() -> None:
     """Reload the settings."""
     settings.clear()
-    settings.update(Settings().
+    settings.update(Settings().model_dump())

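A minimal usage sketch for the reworked settings module, assuming the working directory contains a `uEdition.yml` or `uEdition.yaml` (the new `NoConfigError` is presumably raised during loading when neither file exists; the exact raise site is not visible in this diff):

```python
# Minimal usage sketch for uedition.settings in 2.0.0a1; assumes a valid
# uEdition.yml/uEdition.yaml is present in the current working directory.
from uedition.settings import Settings, reload_settings, settings

print(settings["version"])            # defaults to "2" in the v2 schema
print(settings["sphinx_config"])      # new field, defaults to {}
print(settings["repository"]["url"])  # repository.branch no longer exists
reload_settings()                     # clears and re-reads the settings dict in place
print(sorted(Settings().model_dump().keys()))
```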
{uedition-1.3.1.dist-info → uedition-2.0.0a1.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
-Metadata-Version: 2.
+Metadata-Version: 2.4
 Name: uEdition
-Version:
+Version: 2.0.0a1
 Project-URL: Documentation, https://github.com/uEdition/uEdition#readme
 Project-URL: Issues, https://github.com/uEdition/uEdition/issues
 Project-URL: Source, https://github.com/uEdition/uEdition
@@ -11,17 +11,22 @@ Classifier: Development Status :: 4 - Beta
 Classifier: Programming Language :: Python
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
 Classifier: Programming Language :: Python :: Implementation :: CPython
 Classifier: Programming Language :: Python :: Implementation :: PyPy
 Requires-Python: >=3.10
 Requires-Dist: copier<10.0.0,>=9.0.0
-Requires-Dist: jupyter-book<2.0.0,>=1.0.0
 Requires-Dist: livereload
 Requires-Dist: lxml<6.0.0,>=4.9.2
+Requires-Dist: myst-parser<5,>=4.0.1
 Requires-Dist: pydantic-settings<3.0.0,>=2.0.0
 Requires-Dist: pydantic<3.0.0,>=2.0.0
 Requires-Dist: pyyaml<7.0.0,>=6.0.0
-Requires-Dist:
+Requires-Dist: sphinx-book-theme<2,>=1.1.4
+Requires-Dist: sphinx-external-toc<2,>=1.0.1
+Requires-Dist: sphinx<9,>=8.2.3
+Requires-Dist: typer<1.0.0
 Description-Content-Type: text/markdown
 
 # μEdition

uedition-2.0.0a1.dist-info/RECORD
ADDED
@@ -0,0 +1,25 @@
+uedition/__about__.py,sha256=kq7WGmRyG7G8lDeopl9H4oZ4X9xtex9Qs3BryTuumU0,160
+uedition/__init__.py,sha256=xDDK2i5l1NLhxq6_LJH5g7UJDrFEjIJNmT2tcg7xNWI,301
+uedition/__main__.py,sha256=Pg_yGV-ndR2iiImDC3f017llSX6pSYMslIwZlw8vEpQ,189
+uedition/settings.py,sha256=G73r6muJmRzuENZ-2n51bBf88qS3tRv0kdxr2SRt1j4,5048
+uedition/cli/__init__.py,sha256=gnpl_N8uaw-4uF5ByWV3jXveJBvjLb_uay5YkYCUQWw,1478
+uedition/cli/build.py,sha256=L_-OsIEFBXQh-aTHLwLhOXJzXqRH61X2SgVIPxmUgQ0,8169
+uedition/cli/create.py,sha256=Q-SvDq9VtmUP4DQhuuvt1eZ_72sX8_tcFOj2Bt_T6J8,371
+uedition/cli/language.py,sha256=JAyUwNa4gwqMvrJDPPKGkMLm5Cx9sHJkU5r6xTAMb7M,2214
+uedition/cli/serve.py,sha256=FcKp0IbjcyCgn1XjU8VdhI59pGRMNCSXa5jbAS23nxs,1513
+uedition/cli/update.py,sha256=SmAcczHxj3j3X0QYplvXPoIHS8XEfXYVqgXBney0v9c,550
+uedition/ext/__init__.py,sha256=hAK3MB5il4tAkfWnZNVxIJhfJ5vN0Fdmmtz0ZAYsvo4,406
+uedition/ext/config.py,sha256=zu0XSH3Ca09n--umhUJ7k6611lVCecOTVZCWAFn4TRU,994
+uedition/ext/language_switcher.css,sha256=y4LzkCgCm6E_nHt15I4Ku5QzBNpjwda9bt9FsqD1ybM,132
+uedition/ext/language_switcher.js,sha256=HIgQiLg0WGS_G_VjpfEpTDLqb1HwHxcL3r6mdoSUix4,2697
+uedition/ext/language_switcher.py,sha256=tHpf4HsvMtatVn5dQ3EFlrk5urFaMzsZZY755cvgCu8,1425
+uedition/ext/settings.py,sha256=CCbvwlWjhikhoeRZ5E_SuA4zUIqDBMkRes_hjOUxjWk,3735
+uedition/ext/tei/__init__.py,sha256=8KgVi31bz8nI65m6u4EdT_f1qNCP45HrU0V7MSGlZxA,1074
+uedition/ext/tei/builder.py,sha256=LkLR3cu1M6lHHdcjhH5esZ9Qn6eFBXCZWkxVxdXv69E,11719
+uedition/ext/tei/parser.py,sha256=ebeqEiji2RZ-Gfszf0Yaa-Lqww-EPqLmxXiMBq-gj90,13216
+uedition/ext/tei/tei_download.js,sha256=5_IPCuamZGPXWriPPPz5wA-zo0Y0Oy1858S6ltxSdQ8,2151
+uedition-2.0.0a1.dist-info/METADATA,sha256=YquOplbaOq4Z17fRpMEQMK2S1imGpd0HS3TWeCaWXEk,2572
+uedition-2.0.0a1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+uedition-2.0.0a1.dist-info/entry_points.txt,sha256=cDOOVBb1SZ072ZkY1hW4Y7I_WKKGCtCJtDY1XST1Hr4,96
+uedition-2.0.0a1.dist-info/licenses/LICENSE.txt,sha256=MhLJl8GE8mnuO5i_pvKaobpIGnhiAEdkY-a6LKiuwCE,1101
+uedition-2.0.0a1.dist-info/RECORD,,

uedition/cli/check.py
DELETED
@@ -1,183 +0,0 @@
-# SPDX-FileCopyrightText: 2023-present Mark Hall <mark.hall@work.room3b.eu>
-#
-# SPDX-License-Identifier: MIT
-"""The uEdition check functionality for validating a uEdition and its files."""
-import os
-from threading import Thread
-
-import typer
-from rich import print as print_cli
-from rich.progress import Progress
-from yaml import safe_load
-
-from uedition.settings import settings
-
-
-def collect_files(toc: dict) -> list[str]:
-    """Collect all files from a TOC."""
-    files = []
-    if "parts" in toc:
-        for part in toc["parts"]:
-            files.extend(collect_files(part))
-    elif "chapters" in toc:
-        for chapter in toc["chapters"]:
-            files.append(chapter["file"])
-            files.extend(collect_files(chapter))
-    elif "sections" in toc:
-        for sections in toc["sections"]:
-            files.append(sections["file"])
-    return files
-
-
-def compare_tocs(prefix: str, toc_a: dict, toc_b: dict) -> list[tuple[str | None, str | None]]:
-    """Recursively compare the structure of two TOCs."""
-    mismatches = []
-    if "parts" in toc_a and "parts" in toc_b:
-        pass
-    elif "chapters" in toc_a and "chapters" in toc_b:
-        pass
-    elif "sections" in toc_a and "sections" in toc_b:
-        pass
-    elif "parts" in toc_a:
-        mismatches.append(f"{prefix}has parts, which are missing from")
-    elif "chapters" in toc_a:
-        mismatches.append(f"{prefix}has chapters, which are missing from")
-    elif "sections" in toc_b:
-        mismatches.append(f"{prefix}has sections, which are missing from")
-    return mismatches
-
-
-class ConfigurationFileChecks(Thread):
-    """Basic configuration file checks."""
-
-    def __init__(self: "ConfigurationFileChecks", progress: Progress, task: int) -> None:
-        """Initialise the thread."""
-        super().__init__(group=None)
-        self._progress = progress
-        self._task = task
-        self.errors = []
-
-    def run(self: "ConfigurationFileChecks") -> None:
-        """Run the checks."""
-        for lang in settings["languages"]:
-            yaml_path = os.path.join(lang["path"], "_config.yml")
-            if os.path.exists(yaml_path):
-                with open(yaml_path) as in_f:
-                    try:
-                        safe_load(in_f)
-                    except Exception as e:
-                        self.errors.append(str(e))
-            else:
-                self.errors.append(f"Missing configuration file {yaml_path}")
-            self._progress.update(self._task, advance=1)
-
-
-class TocFileChecks(Thread):
-    """TOC file checks."""
-
-    def __init__(self: "TocFileChecks", progress: Progress, task: int) -> None:
-        """Initialise the thread."""
-        super().__init__(group=None)
-        self._progress = progress
-        self._task = task
-        self.errors = []
-
-    def run(self: "TocFileChecks") -> None:
-        """Run the checks."""
-        for lang in settings["languages"]:
-            yaml_path = os.path.join(lang["path"], "_toc.yml")
-            if os.path.exists(yaml_path):
-                with open(yaml_path) as in_f:
-                    try:
-                        toc = safe_load(in_f)
-                        if "root" in toc:
-                            root_path = os.path.join(lang["path"], f'{toc["root"]}.md')
-                            if not os.path.exists(root_path):
-                                self.errors.append(f'Root in {yaml_path} points to missing file {toc["root"]}.md')
-                            root_base = os.path.dirname(root_path)
-                            for filename in collect_files(toc):
-                                if not os.path.exists(os.path.join(root_base, f"{filename}.md")):
-                                    self.errors.append(f"File {filename}.md missing in {yaml_path}")
-                        else:
-                            self.errors.append(f"No root in {yaml_path}")
-                    except Exception as e:
-                        self.errors.append(str(e))
-            else:
-                self.errors.append(f"Missing toc file {yaml_path}")
-            self._progress.update(self._task, advance=1)
-
-
-class LanguageConsistencyChecks(Thread):
-    """Multi-language consistency checks."""
-
-    def __init__(self: "LanguageConsistencyChecks", progress: Progress, task: int) -> None:
-        """Initialise the thread."""
-        super().__init__(group=None)
-        self._progress = progress
-        self._task = task
-        self.errors = []
-
-    def run(self: "LanguageConsistencyChecks") -> None:
-        """Run the checks."""
-        if len(settings["languages"]) == 0:
-            return
-        base_toc_path = os.path.join(settings["languages"][0]["path"], "_toc.yml")
-        if not os.path.exists(base_toc_path):
-            return
-        with open(base_toc_path) as in_f:
-            try:
-                base_toc = safe_load(in_f)
-            except Exception:
-                return
-        for lang in settings["languages"][1:]:
-            lang_toc_path = os.path.join(lang["path"], "_toc.yml")
-            if not os.path.exists(lang_toc_path):
-                continue
-            try:
-                with open(lang_toc_path) as in_f:
-                    lang_toc = safe_load(in_f)
-                missmatches = compare_tocs("", base_toc, lang_toc)
-                if len(missmatches) > 0:
-                    for mismatch in missmatches:
-                        if mismatch[0] is None:
-                            self.errors.append(f"{base_toc_path} {mismatch[1]} {lang_toc_path}")
-                        elif mismatch[0] is None:
-                            self.errors.append(f"{lang_toc_path} {mismatch[0]} {base_toc_path}")
-                self._progress.update(self._task, advance=1)
-            except Exception as e:
-                self.errors.append(f"Fail to check langauge {lang}: {e!s}")
-
-
-def run() -> None:
-    """Check that the μEdition is correctly set up."""
-    errors = []
-    with Progress() as progress:
-        threads = [
-            ConfigurationFileChecks(
-                progress,
-                progress.add_task("[green]Configuration file checks", total=len(settings["languages"])),
-            ),
-            TocFileChecks(
-                progress,
-                progress.add_task("[green]TOC file checks", total=len(settings["languages"])),
-            ),
-            LanguageConsistencyChecks(
-                progress,
-                progress.add_task(
-                    "[green]Language consistency checks",
-                    total=len(settings["languages"]) - 1,
-                ),
-            ),
-        ]
-        for thread in threads:
-            thread.start()
-        for thread in threads:
-            thread.join()
-            errors.extend(thread.errors)
-        if len(errors) > 0:
-            print_cli("[red]:bug: The following errors were found:")
-            for error in errors:
-                print_cli(f"[red]* {error}")
-            raise typer.Exit(code=1)
-        else:
-            print_cli("[green]:+1: All checks successfully passed")

uedition-1.3.1.dist-info/RECORD
DELETED
@@ -1,25 +0,0 @@
-uedition/__about__.py,sha256=prXHbrDuklWCBT07mMmChRQEDb1vRZGlN7s_X2peuB8,157
-uedition/__init__.py,sha256=xDDK2i5l1NLhxq6_LJH5g7UJDrFEjIJNmT2tcg7xNWI,301
-uedition/__main__.py,sha256=Pg_yGV-ndR2iiImDC3f017llSX6pSYMslIwZlw8vEpQ,189
-uedition/settings.py,sha256=4jWfjD1KmYTwzeD6DhP9taWS-zFp9XHxo1QqUqsSlrA,4774
-uedition/cli/__init__.py,sha256=h3N4ZwgRC72IlkCsTbke4PLTFv-_pn1-npXXPC7S78U,1642
-uedition/cli/build.py,sha256=qrpNP227DlTqa6pHX3NrDw1cohc1dHPi5J6pMFgxKsk,8135
-uedition/cli/check.py,sha256=Jlkyw6mgcz3HM-wJpmzCtKv054MFolCe7m6bSUDtsZA,7005
-uedition/cli/create.py,sha256=Q-SvDq9VtmUP4DQhuuvt1eZ_72sX8_tcFOj2Bt_T6J8,371
-uedition/cli/language.py,sha256=IqSJrZbrQzU-7TJqnCBeC2HUs1N01EAa7jFMvfXTsoA,1943
-uedition/cli/serve.py,sha256=UfVsY26OW9yAz7rnjjatz1gEDGIi1kaTYlkUAiVCuRw,1391
-uedition/cli/update.py,sha256=XKHnvorHqizsB5zP-8ifMrgnQuq6zRk6Tb03dBz_MI4,377
-uedition/ext/__init__.py,sha256=hAK3MB5il4tAkfWnZNVxIJhfJ5vN0Fdmmtz0ZAYsvo4,406
-uedition/ext/config.py,sha256=eEkBkAOaDQPzO_2Uj5OJxyAZiCG4I-TcOEqUyIY8IFY,6023
-uedition/ext/language_switcher.css,sha256=y4LzkCgCm6E_nHt15I4Ku5QzBNpjwda9bt9FsqD1ybM,132
-uedition/ext/language_switcher.js,sha256=HIgQiLg0WGS_G_VjpfEpTDLqb1HwHxcL3r6mdoSUix4,2697
-uedition/ext/language_switcher.py,sha256=tHpf4HsvMtatVn5dQ3EFlrk5urFaMzsZZY755cvgCu8,1425
-uedition/ext/tei/__init__.py,sha256=8KgVi31bz8nI65m6u4EdT_f1qNCP45HrU0V7MSGlZxA,1074
-uedition/ext/tei/builder.py,sha256=XLFM11Gnt_4G4H-2gaJyjOgko_aMmvgZnKftMsSk8qw,11743
-uedition/ext/tei/parser.py,sha256=dCtaYB8jTXKUdUzpEkoCEyRBIu5cDUpnu7TZSs7Ko0I,14342
-uedition/ext/tei/tei_download.js,sha256=5_IPCuamZGPXWriPPPz5wA-zo0Y0Oy1858S6ltxSdQ8,2151
-uedition-1.3.1.dist-info/METADATA,sha256=5vBeefaNrZ99Ep5y2B_A5FXXPolShaBqTCcrW5tVLHI,2358
-uedition-1.3.1.dist-info/WHEEL,sha256=TJPnKdtrSue7xZ_AVGkp9YXcvDrobsjBds1du3Nx6dc,87
-uedition-1.3.1.dist-info/entry_points.txt,sha256=cDOOVBb1SZ072ZkY1hW4Y7I_WKKGCtCJtDY1XST1Hr4,96
-uedition-1.3.1.dist-info/licenses/LICENSE.txt,sha256=MhLJl8GE8mnuO5i_pvKaobpIGnhiAEdkY-a6LKiuwCE,1101
-uedition-1.3.1.dist-info/RECORD,,

{uedition-1.3.1.dist-info → uedition-2.0.0a1.dist-info}/entry_points.txt
File without changes

{uedition-1.3.1.dist-info → uedition-2.0.0a1.dist-info}/licenses/LICENSE.txt
File without changes