py2docfx 0.1.10.dev1819897__py3-none-any.whl → 0.1.10.dev1824234__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- py2docfx/__main__.py +9 -4
- py2docfx/convert_prepare/environment.py +6 -4
- py2docfx/convert_prepare/generate_document.py +13 -1
- py2docfx/convert_prepare/get_source.py +19 -4
- py2docfx/convert_prepare/package_info.py +1 -1
- py2docfx/convert_prepare/pip_utils.py +0 -1
- py2docfx/convert_prepare/sphinx_caller.py +4 -0
- py2docfx/convert_prepare/tests/test_package_info.py +22 -7
- py2docfx/convert_prepare/tests/test_params.py +0 -5
- py2docfx/venv/0/Lib/site-packages/babel/__init__.py +1 -1
- py2docfx/venv/0/Lib/site-packages/babel/core.py +6 -2
- py2docfx/venv/0/Lib/site-packages/babel/dates.py +6 -1
- py2docfx/venv/0/Lib/site-packages/babel/lists.py +40 -11
- py2docfx/venv/0/Lib/site-packages/babel/localedata.py +26 -2
- py2docfx/venv/0/Lib/site-packages/babel/localtime/_helpers.py +14 -0
- py2docfx/venv/0/Lib/site-packages/babel/messages/_compat.py +34 -0
- py2docfx/venv/0/Lib/site-packages/babel/messages/catalog.py +5 -1
- py2docfx/venv/0/Lib/site-packages/babel/messages/checkers.py +3 -8
- py2docfx/venv/0/Lib/site-packages/babel/messages/extract.py +24 -23
- py2docfx/venv/0/Lib/site-packages/babel/messages/frontend.py +122 -48
- py2docfx/venv/0/Lib/site-packages/babel/plural.py +1 -2
- py2docfx/venv/0/Lib/site-packages/babel/support.py +6 -4
- py2docfx/venv/template/Lib/site-packages/babel/__init__.py +1 -1
- py2docfx/venv/template/Lib/site-packages/babel/core.py +6 -2
- py2docfx/venv/template/Lib/site-packages/babel/dates.py +6 -1
- py2docfx/venv/template/Lib/site-packages/babel/lists.py +40 -11
- py2docfx/venv/template/Lib/site-packages/babel/localedata.py +26 -2
- py2docfx/venv/template/Lib/site-packages/babel/localtime/_helpers.py +14 -0
- py2docfx/venv/template/Lib/site-packages/babel/messages/_compat.py +34 -0
- py2docfx/venv/template/Lib/site-packages/babel/messages/catalog.py +5 -1
- py2docfx/venv/template/Lib/site-packages/babel/messages/checkers.py +3 -8
- py2docfx/venv/template/Lib/site-packages/babel/messages/extract.py +24 -23
- py2docfx/venv/template/Lib/site-packages/babel/messages/frontend.py +122 -48
- py2docfx/venv/template/Lib/site-packages/babel/plural.py +1 -2
- py2docfx/venv/template/Lib/site-packages/babel/support.py +6 -4
- {py2docfx-0.1.10.dev1819897.dist-info → py2docfx-0.1.10.dev1824234.dist-info}/METADATA +1 -1
- {py2docfx-0.1.10.dev1819897.dist-info → py2docfx-0.1.10.dev1824234.dist-info}/RECORD +39 -37
- {py2docfx-0.1.10.dev1819897.dist-info → py2docfx-0.1.10.dev1824234.dist-info}/WHEEL +0 -0
- {py2docfx-0.1.10.dev1819897.dist-info → py2docfx-0.1.10.dev1824234.dist-info}/top_level.txt +0 -0
py2docfx/__main__.py
CHANGED
```diff
@@ -231,12 +231,13 @@ def donwload_package_generate_documents(
         output_doc_folder: os.PathLike | None,
         github_token: str, ado_token: str):
 
+    start_time = time.time()
     venv_manager = VirtualEnvironmentManager(package_info_list, required_package_list, github_token, ado_token)
     template_venv_id = "template"
     template_venv_path = venv_manager.create_template_venv(template_venv_id)
-
+    end_time = time.time()
+    print(f"<create_template_venv>{template_venv_id},{end_time-start_time}<create_template_venv/>")
     def initial_venv_cache():
-        start_time = time.time()
        package_count = len(package_info_list)
        cache_cout = CACHED_VIRTUALENV_LENGTH
        if package_count < CACHED_VIRTUALENV_LENGTH:
@@ -251,8 +252,6 @@ def donwload_package_generate_documents(
 
         for thread in threads:
             thread.join()
-        end_time = time.time()
-        print(f"<initial_venv_cache>{end_time - start_time}<initial_venv_cache/>")
 
     def donwload_one_package_generate_document(
             package: PackageInfo,
@@ -287,7 +286,10 @@ def donwload_package_generate_documents(
             package.path.move_document_to_target(os.path.join(output_doc_folder, package.name))
 
     # Cache virtual environments
+    start_time = time.time()
     initial_venv_cache()
+    end_time = time.time()
+    print(f"<initial_venv_cache>{end_time - start_time}<initial_venv_cache/>")
 
     for package_index, package in enumerate(package_info_list):
         worker_venv_id = str(package_index)
@@ -317,7 +319,10 @@ def donwload_package_generate_documents(
         thread.join()
 
     if output_doc_folder:
+        start_time = time.time()
         move_root_toc_to_target(YAML_OUTPUT_ROOT, output_doc_folder)
+        end_time = time.time()
+        print(f"<move_root_toc_to_target>{end_time-start_time}<move_root_toc_to_target/>")
 
 def prepare_out_dir(output_root: str | os.PathLike) -> os.PathLike | None:
     # prepare output_root\DOC_FOLDER_NAME (if folder contains files, raise exception)
```
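A note on the pattern above: every instrumented call site in this version repeats the same `start_time`/`end_time` bookkeeping around a single call and prints a pseudo-XML marker. Below is a minimal sketch of how the same output could be factored into a context manager; `report_timing` is a hypothetical helper for illustration and is not part of py2docfx.

```python
import time
from contextlib import contextmanager

@contextmanager
def report_timing(tag: str, label: str = ""):
    # Hypothetical helper: emits the same "<tag>label,elapsed<tag/>" line
    # that the instrumented call sites in this diff print by hand.
    start_time = time.time()
    try:
        yield
    finally:
        elapsed = time.time() - start_time
        prefix = f"{label}," if label else ""
        print(f"<{tag}>{prefix}{elapsed}<{tag}/>")

# Usage mirroring one of the call sites above:
# with report_timing("create_template_venv", template_venv_id):
#     template_venv_path = venv_manager.create_template_venv(template_venv_id)
```

The same pattern recurs in the `environment.py`, `generate_document.py`, `get_source.py`, and `sphinx_caller.py` diffs that follow.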
py2docfx/convert_prepare/environment.py
CHANGED
```diff
@@ -49,7 +49,6 @@ class VirtualEnvironmentManager:
             install_package(package, executable)
 
     def create_template_venv(self, venv_name: str) -> str:
-        start_time = time.time()
         venv_dir = os.path.join(PACKAGE_ROOT, "venv", venv_name)
 
         try:
@@ -59,10 +58,13 @@ class VirtualEnvironmentManager:
 
         if os.name == "nt":
             env_executable = os.path.join(venv_dir, "Scripts", "python.exe")
-
+        start_time = time.time()
         install_converter_requirements(env_executable)
+        end_time = time.time()
+        print(f"<install_converter_requirements>{venv_name},{end_time-start_time}<install_converter_requirements/>")
         if (self.required_package_list is not None) and (len(self.required_package_list) > 0):
+            start_time = time.time()
             self.install_required_packages(env_executable)
-
-
+            end_time = time.time()
+            print(f"<install_required_packages>{venv_name},{end_time-start_time}<install_required_packages/>")
         return venv_dir
```
py2docfx/convert_prepare/generate_document.py
CHANGED
```diff
@@ -1,5 +1,6 @@
 from __future__ import annotations # Avoid A | B annotation break under <= py3.9
 import os
+import time
 from py2docfx.convert_prepare.generate_conf import generate_conf
 from py2docfx.convert_prepare.git import checkout
 from py2docfx.convert_prepare.package_info import PackageInfo
@@ -9,6 +10,7 @@ from py2docfx.convert_prepare.subpackage import merge_subpackage_files
 CONF_TEMPLATE_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "conf_templates")
 
 def generate_document(pkg: PackageInfo, output_root: str | os.PathLike, executable: str):
+    start_time = time.time()
     # Copy manual written RST from target doc repo
     package_paths = pkg.path
     if output_root:
@@ -36,11 +38,19 @@ def generate_document(pkg: PackageInfo, output_root: str | os.PathLike, executab
             print(rst_file)
 
     print("<CI INFO>: Running Sphinx build...")
+    end_time = time.time()
+    print(f"<pre_generate_conf>{pkg.name},{end_time-start_time}<pre_generate_conf/>")
+    start_time = time.time()
     generate_conf(pkg, package_paths.doc_folder, CONF_TEMPLATE_DIR)
+    end_time = time.time()
+    print(f"<generate_conf>{pkg.name},{end_time-start_time}<generate_conf/>")
+    start_time = time.time()
     run_converter(package_paths.doc_folder, package_paths.yaml_output_folder, executable)
-
+    end_time = time.time()
+    print(f"<run_converter>{pkg.name},{end_time-start_time}<run_converter/>")
     subpackages_path_record = {}
     if pkg.build_in_subpackage:
+        start_time = time.time()
         subpackages_yaml_path = os.path.join(package_paths.yaml_output_folder, "subpackages")
         for (subpackage_name, subpackage_path) in subpackages_rst_record.items():
             subpackage_yaml_path = os.path.join(subpackages_yaml_path, subpackage_name)
@@ -48,3 +58,5 @@ def generate_document(pkg: PackageInfo, output_root: str | os.PathLike, executab
             run_converter(subpackage_path, subpackage_yaml_path, executable)
 
         merge_subpackage_files(subpackages_path_record, package_paths.yaml_output_folder, pkg.name)
+        end_time = time.time()
+        print(f"<merge_subpackage_files>{pkg.name},{end_time-start_time}<merge_subpackage_files/>")
```
py2docfx/convert_prepare/get_source.py
CHANGED
```diff
@@ -2,6 +2,7 @@ import os.path as path
 import os
 import subprocess
 import sys
+import time
 
 import py2docfx.convert_prepare.git as git
 import py2docfx.convert_prepare.pip_utils as pip_utils
@@ -75,6 +76,7 @@ def get_source(pkg: PackageInfo, cnt: int, executable: str, vststoken=None, gith
     dist_dir = path.join("dist_temp", path_cnt)
 
     if pkg.install_type == PackageInfo.InstallType.SOURCE_CODE:
+        start_time = time.time()
         if pkg.url:
             repo_folder = path.join("source_repo", path_cnt)
             token = githubtoken if "github.com" in pkg.url else vststoken
@@ -89,8 +91,10 @@ def get_source(pkg: PackageInfo, cnt: int, executable: str, vststoken=None, gith
         else:
             source_folder = pkg.folder
             sys.path.insert(0, source_folder)
-
+        end_time = time.time()
+        print(f"<download_source>{pkg.name},{end_time-start_time}<download_source/>")
     elif pkg.install_type == PackageInfo.InstallType.PYPI:
+        start_time = time.time()
         full_name = pkg.get_combined_name_version()
         pip_utils.download(
             full_name,
@@ -99,7 +103,9 @@ def get_source(pkg: PackageInfo, cnt: int, executable: str, vststoken=None, gith
             extra_index_url=pkg.extra_index_url,
             prefer_source_distribution=pkg.prefer_source_distribution,
         )
-
+        end_time = time.time()
+        print(f"<download_pypi>{pkg.name},{end_time-start_time}<download_pypi/>")
+        start_time = time.time()
         # unpack the downloaded wheel file.
         downloaded_dist_file = path.join(dist_dir, os.listdir(dist_dir)[0])
         pack.unpack_dist(downloaded_dist_file)
@@ -108,9 +114,14 @@ def get_source(pkg: PackageInfo, cnt: int, executable: str, vststoken=None, gith
             path.dirname(downloaded_dist_file),
             os.listdir(dist_dir)[0]
         )
+        end_time = time.time()
+        print(f"<unpack_pypi>{pkg.name},{end_time-start_time}<unpack_pypi/>")
     elif pkg.install_type == PackageInfo.InstallType.DIST_FILE:
+        start_time = time.time()
         pip_utils.download(pkg.location, dist_dir, executable, prefer_source_distribution=False)
-
+        end_time = time.time()
+        print(f"<download_dist>{pkg.name},{end_time-start_time}<download_dist/>")
+        start_time = time.time()
         # unpack the downloaded dist file.
         downloaded_dist_file = path.join(dist_dir, os.listdir(dist_dir)[0])
         pack.unpack_dist(downloaded_dist_file)
@@ -122,7 +133,11 @@ def get_source(pkg: PackageInfo, cnt: int, executable: str, vststoken=None, gith
             path.dirname(downloaded_dist_file),
             os.listdir(dist_dir)[0]
         )
+        end_time = time.time()
+        print(f"<unpack_dist>{pkg.name},{end_time-start_time}<unpack_dist/>")
     else:
        raise ValueError(f"Unknown install type: {pkg.install_type}")
-
+    start_time = time.time()
     update_package_info(pkg, source_folder, executable)
+    end_time = time.time()
+    print(f"<update_package_info>{pkg.name},{end_time-start_time}<update_package_info/>")
```
py2docfx/convert_prepare/package_info.py
CHANGED
```diff
@@ -32,7 +32,7 @@ class PackageInfo:
         package_info = PackageInfo()
         package_info.exclude_path = dict.get("exclude_path", [])
         package_info.extension_config = dict.get("extension_config", {})
-
+
         if reading_required_packages:
             package_info_dict = dict
         else:
```
(The changed line renders blank on both sides: a whitespace-only change.)
py2docfx/convert_prepare/sphinx_caller.py
CHANGED
```diff
@@ -1,5 +1,6 @@
 import os
 import sys
+import time
 from sphinx.application import Sphinx
 import sphinx.ext.apidoc as apidoc
 import sphinx.cmd.build
@@ -95,5 +96,8 @@ def run_converter(rst_path, out_path, executable, conf_path = None):
         env_tmp["PYTHONPATH"] = f"{package_root_parent};{python_path_current}" if python_path_current else package_root_parent
     else:
         env_tmp["PYTHONPATH"] = f"{package_root_parent}:{python_path_current}" if python_path_current else package_root_parent
+    start_time = time.time()
     subprocess.run(sphinx_param, check=True, cwd=PACKAGE_ROOT, env=env_tmp)
+    end_time = time.time()
+    print(f"<run_sphinx>{out_path},{end_time-start_time}<run_sphinx/>")
     return outdir
```
py2docfx/convert_prepare/tests/test_package_info.py
CHANGED
```diff
@@ -17,8 +17,6 @@ package_info_1 = PackageInfo.parse_from(test_dict["packages"][1], False)
 
 package_info_2 = PackageInfo.parse_from(test_dict["packages"][2], False)
 
-package_info_3 = PackageInfo.parse_from(test_dict["packages"][3], False)
-
 def test_parse_from():
     assert package_info_0.exclude_path == ["test*", "example*", "sample*", "doc*"]
     assert package_info_0.name == "azure-mltable-py2docfxtest"
@@ -28,11 +26,6 @@ def test_get_combined_name_version():
     name_version = package_info_1.get_combined_name_version()
     assert name_version == "azureml-accel-models==1.0.0"
 
-def test_get_sphinx_extensions():
-    assert package_info_3.sphinx_extensions == ["sphinx-pydantic"]
-    assert package_info_3.name == "semantic-kernel"
-    assert package_info_3.install_type.name == "PYPI"
-
 def test_intall_command():
     install_command = package_info_0.get_install_command()
     assert install_command[0] == "azure-mltable-py2docfxtest"
@@ -42,6 +35,28 @@ def test_intall_command():
     assert install_command[0] == "azureml-accel-models==1.0.0"
     assert install_command[1] == []
 
+def test_get_exclude_command(tmp_path):
+    source_folder = os.path.join(tmp_path,"source_folder")
+    yaml_output_folder = os.path.join(tmp_path,"yaml_output_folder")
+    package_info_0.path = Source(
+        source_folder = source_folder, yaml_output_folder = yaml_output_folder, package_name = "azure-mltable-py2docfxtest"
+    )
+    exclude_path = package_info_0.get_exluded_command()
+    expected_exclude_path = [
+        "build/*",
+        "setup.py",
+        "test*",
+        "example*",
+        "sample*",
+        "doc*",
+        "azure/__init__.py",
+        "azure/mltable/__init__.py"
+    ]
+    def form_exclude_path(raletive_path):
+        return os.path.join(source_folder, raletive_path)
+    assert exclude_path == [form_exclude_path(path) for path in expected_exclude_path]
+
+
 def test_get_exclude_command(tmp_path):
     source_folder = os.path.join(tmp_path,"source_folder")
     yaml_output_folder = os.path.join(tmp_path,"yaml_output_folder")
```
|
@@ -21,11 +21,6 @@ def package_info_assert(package_info_list, required_package_info_list):
|
|
21
21
|
assert package.install_type.name == "PYPI"
|
22
22
|
assert package.exclude_path == []
|
23
23
|
|
24
|
-
package = package_info_list[3]
|
25
|
-
assert package.name == "semantic-kernel"
|
26
|
-
assert package.install_type.name == "PYPI"
|
27
|
-
assert package.sphinx_extensions == ["sphinx-pydantic"]
|
28
|
-
|
29
24
|
required_package = required_package_info_list[0]
|
30
25
|
assert required_package.name == None
|
31
26
|
assert required_package.install_type.name == "DIST_FILE"
|
py2docfx/venv/0/Lib/site-packages/babel/core.py
CHANGED
```diff
@@ -201,7 +201,11 @@ class Locale:
 
         identifier = str(self)
         identifier_without_modifier = identifier.partition('@')[0]
-        if not localedata.exists(identifier_without_modifier):
+        if localedata.exists(identifier):
+            self.__data_identifier = identifier
+        elif localedata.exists(identifier_without_modifier):
+            self.__data_identifier = identifier_without_modifier
+        else:
             raise UnknownLocaleError(identifier)
 
     @classmethod
@@ -436,7 +440,7 @@ class Locale:
     @property
     def _data(self) -> localedata.LocaleDataDict:
         if self.__data is None:
-            self.__data = localedata.LocaleDataDict(localedata.load(str(self)))
+            self.__data = localedata.LocaleDataDict(localedata.load(self.__data_identifier))
         return self.__data
 
     def get_display_name(self, locale: Locale | str | None = None) -> str | None:
```
py2docfx/venv/0/Lib/site-packages/babel/dates.py
CHANGED
```diff
@@ -826,6 +826,10 @@ def format_skeleton(
     Traceback (most recent call last):
         ...
     KeyError: yMMd
+    >>> format_skeleton('GH', t, fuzzy=True, locale='fi_FI')  # GH is not in the Finnish locale and there is no close match, an error is thrown
+    Traceback (most recent call last):
+        ...
+    KeyError: None
 
     After the skeleton is resolved to a pattern `format_datetime` is called so
     all timezone processing etc is the same as for that.
@@ -835,7 +839,8 @@ def format_skeleton(
                      time in UTC is used
     :param tzinfo: the time-zone to apply to the time for display
     :param fuzzy: If the skeleton is not found, allow choosing a skeleton that's
-                  close enough to it.
+                  close enough to it. If there is no close match, a `KeyError`
+                  is thrown.
     :param locale: a `Locale` object or a locale identifier
     """
     locale = Locale.parse(locale)
```
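The docstring addition makes the failure mode explicit: even with `fuzzy=True`, `format_skeleton` raises `KeyError` (here `KeyError: None`) when no skeleton is close enough. A minimal sketch of the caller-side handling this implies:

```python
from datetime import datetime
from babel.dates import format_skeleton

t = datetime(2007, 4, 1, 15, 30)
# fuzzy=True does not guarantee a result: if no skeleton in the locale is
# close enough, a KeyError propagates, so callers that need a hard
# fallback should catch it.
try:
    print(format_skeleton('GH', t, fuzzy=True, locale='fi_FI'))
except KeyError:
    print('no close skeleton match in fi_FI')
```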
py2docfx/venv/0/Lib/site-packages/babel/lists.py
CHANGED
```diff
@@ -26,9 +26,11 @@ if TYPE_CHECKING:
 DEFAULT_LOCALE = default_locale()
 
 
-def format_list(lst: Sequence[str],
-                style: Literal['standard', 'standard-short', 'or', 'or-short', 'unit', 'unit-short', 'unit-narrow'] = 'standard',
-                locale: Locale | str | None = DEFAULT_LOCALE) -> str:
+def format_list(
+    lst: Sequence[str],
+    style: Literal['standard', 'standard-short', 'or', 'or-short', 'unit', 'unit-short', 'unit-narrow'] = 'standard',
+    locale: Locale | str | None = DEFAULT_LOCALE,
+) -> str:
     """
     Format the items in `lst` as a list.
 
@@ -39,7 +41,11 @@ def format_list(lst: Sequence[str],
     >>> format_list(['omena', 'peruna', 'aplari'], style='or', locale='fi')
     u'omena, peruna tai aplari'
 
-    These styles are defined, but not all are necessarily available in all locales.
+    Not all styles are necessarily available in all locales.
+    The function will attempt to fall back to replacement styles according to the rules
+    set forth in the CLDR root XML file, and raise a ValueError if no suitable replacement
+    can be found.
+
     The following text is verbatim from the Unicode TR35-49 spec [1].
 
     * standard:
@@ -76,14 +82,9 @@ def format_list(lst: Sequence[str],
     if len(lst) == 1:
         return lst[0]
 
-    if style not in locale.list_patterns:
-        raise ValueError(
-            f'Locale {locale} does not support list formatting style {style!r} '
-            f'(supported are {sorted(locale.list_patterns)})',
-        )
-    patterns = locale.list_patterns[style]
+    patterns = _resolve_list_style(locale, style)
 
-    if len(lst) == 2:
+    if len(lst) == 2 and '2' in patterns:
         return patterns['2'].format(*lst)
 
     result = patterns['start'].format(lst[0], lst[1])
@@ -92,3 +93,31 @@ def format_list(lst: Sequence[str],
         result = patterns['end'].format(result, lst[-1])
 
     return result
+
+
+# Based on CLDR 45's root.xml file's `<alias>`es.
+# The root file defines both `standard` and `or`,
+# so they're always available.
+# TODO: It would likely be better to use the
+# babel.localedata.Alias mechanism for this,
+# but I'm not quite sure how it's supposed to
+# work with inheritance and data in the root.
+_style_fallbacks = {
+    "or-narrow": ["or-short", "or"],
+    "or-short": ["or"],
+    "standard-narrow": ["standard-short", "standard"],
+    "standard-short": ["standard"],
+    "unit": ["unit-short", "standard"],
+    "unit-narrow": ["unit-short", "unit", "standard"],
+    "unit-short": ["standard"],
+}
+
+
+def _resolve_list_style(locale: Locale, style: str):
+    for style in (style, *(_style_fallbacks.get(style, []))):  # noqa: B020
+        if style in locale.list_patterns:
+            return locale.list_patterns[style]
+    raise ValueError(
+        f"Locale {locale} does not support list formatting style {style!r} "
+        f"(supported are {sorted(locale.list_patterns)})",
+    )
```
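The fallback table turns what used to be an immediate `ValueError` into a CLDR-root-style resolution chain, e.g. `unit-narrow` → `unit-short` → `unit` → `standard`. A small illustration (whether a given locale actually exercises the fallback depends on the CLDR data shipped with this Babel version):

```python
from babel.lists import format_list

# If the narrow pattern is absent from the locale data, 'unit-narrow' now
# resolves through 'unit-short' -> 'unit' -> 'standard' instead of raising.
print(format_list(['3 ft', '2 in'], style='unit-narrow', locale='en'))

# A style missing from both the locale and the fallback chain still raises
# ValueError listing the supported styles.
```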
py2docfx/venv/0/Lib/site-packages/babel/localedata.py
CHANGED
```diff
@@ -95,6 +95,27 @@ def locale_identifiers() -> list[str]:
     ]
 
 
+def _is_non_likely_script(name: str) -> bool:
+    """Return whether the locale is of the form ``lang_Script``,
+    and the script is not the likely script for the language.
+
+    This implements the behavior of the ``nonlikelyScript`` value of the
+    ``localRules`` attribute for parent locales added in CLDR 45.
+    """
+    from babel.core import get_global, parse_locale
+
+    try:
+        lang, territory, script, variant, *rest = parse_locale(name)
+    except ValueError:
+        return False
+
+    if lang and script and not territory and not variant and not rest:
+        likely_subtag = get_global('likely_subtags').get(lang)
+        _, _, likely_script, *_ = parse_locale(likely_subtag)
+        return script != likely_script
+    return False
+
+
 def load(name: os.PathLike[str] | str, merge_inherited: bool = True) -> dict[str, Any]:
     """Load the locale data for the given locale.
 
@@ -132,8 +153,11 @@ def load(name: os.PathLike[str] | str, merge_inherited: bool = True) -> dict[str
         from babel.core import get_global
         parent = get_global('parent_exceptions').get(name)
         if not parent:
-            parts = name.split('_')
-            parent = "root" if len(parts) == 1 else "_".join(parts[:-1])
+            if _is_non_likely_script(name):
+                parent = 'root'
+            else:
+                parts = name.split('_')
+                parent = "root" if len(parts) == 1 else "_".join(parts[:-1])
         data = load(parent).copy()
     filename = resolve_locale_filename(name)
     with open(filename, 'rb') as fileobj:
```
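A sketch of what the new parent rule changes, using `az_Cyrl` as an example of a non-likely script (assuming current CLDR likely-subtag data, where `az` maps to `Latn`; `_is_non_likely_script` is a private helper, called here only for illustration):

```python
from babel import localedata

# Per the CLDR 45 "nonlikelyScript" rule, a lang_Script locale whose script
# is not the language's likely script inherits from root, not from the bare
# language.
print(localedata._is_non_likely_script('az_Cyrl'))  # True: likely script is Latn
print(localedata._is_non_likely_script('az_AZ'))    # False: territory, not script
data = localedata.load('az_Cyrl')                   # parent chain goes to root, not 'az'
```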
py2docfx/venv/0/Lib/site-packages/babel/localtime/_helpers.py
CHANGED
```diff
@@ -2,7 +2,11 @@ try:
     import pytz
 except ModuleNotFoundError:
     pytz = None
+
+try:
     import zoneinfo
+except ModuleNotFoundError:
+    zoneinfo = None
 
 
 def _get_tzinfo(tzenv: str):
@@ -19,6 +23,16 @@ def _get_tzinfo(tzenv: str):
     else:
         try:
             return zoneinfo.ZoneInfo(tzenv)
+        except ValueError as ve:
+            # This is somewhat hacky, but since _validate_tzfile_path() doesn't
+            # raise a specific error type, we'll need to check the message to be
+            # one we know to be from that function.
+            # If so, we pretend it meant that the TZ didn't exist, for the benefit
+            # of `babel.localtime` catching the `LookupError` raised by
+            # `_get_tzinfo_or_raise()`.
+            # See https://github.com/python-babel/babel/issues/1092
+            if str(ve).startswith("ZoneInfo keys "):
+                return None
         except zoneinfo.ZoneInfoNotFoundError:
             pass
 
```
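The new `except ValueError` branch matters when `zoneinfo` (rather than pytz) handles the lookup: `zoneinfo.ZoneInfo` rejects malformed keys with a bare `ValueError` whose message starts with "ZoneInfo keys", and the patch folds that case into the ordinary "zone not found" path. A sketch of the caller-visible effect (the zoneinfo branch is only reached when pytz is not installed, since the pytz branch is tried first):

```python
from babel.localtime._helpers import _get_tzinfo

# A key that fails zoneinfo's path validation (e.g. one containing "..")
# raises ValueError rather than ZoneInfoNotFoundError; the patch now maps
# that to None, the same result as a well-formed but unknown zone name.
print(_get_tzinfo("../etc/localtime"))    # None instead of an uncaught ValueError
print(_get_tzinfo("Mars/Olympus_Mons"))   # None: unknown zone
```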
py2docfx/venv/0/Lib/site-packages/babel/messages/_compat.py
ADDED
```diff
@@ -0,0 +1,34 @@
+import sys
+from functools import partial
+
+
+def find_entrypoints(group_name: str):
+    """
+    Find entrypoints of a given group using either `importlib.metadata` or the
+    older `pkg_resources` mechanism.
+
+    Yields tuples of the entrypoint name and a callable function that will
+    load the actual entrypoint.
+    """
+    if sys.version_info >= (3, 10):
+        # "Changed in version 3.10: importlib.metadata is no longer provisional."
+        try:
+            from importlib.metadata import entry_points
+        except ImportError:
+            pass
+        else:
+            eps = entry_points(group=group_name)
+            # Only do this if this implementation of `importlib.metadata` is
+            # modern enough to not return a dict.
+            if not isinstance(eps, dict):
+                for entry_point in eps:
+                    yield (entry_point.name, entry_point.load)
+                return
+
+    try:
+        from pkg_resources import working_set
+    except ImportError:
+        pass
+    else:
+        for entry_point in working_set.iter_entry_points(group_name):
+            yield (entry_point.name, partial(entry_point.load, require=True))
```
py2docfx/venv/0/Lib/site-packages/babel/messages/catalog.py
CHANGED
```diff
@@ -479,7 +479,11 @@ class Catalog:
             self.last_translator = value
         elif name == 'language':
             value = value.replace('-', '_')
-            self._set_locale(value)
+            # The `or None` makes sure that the locale is set to None
+            # if the header's value is an empty string, which is what
+            # some tools generate (instead of eliding the empty Language
+            # header altogether).
+            self._set_locale(value or None)
         elif name == 'language-team':
             self.language_team = value
         elif name == 'content-type':
```
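A quick illustration of the `or None` fix, assuming the public `mime_headers` setter is how the Language header reaches this code path:

```python
from babel.messages.catalog import Catalog

catalog = Catalog()
# Some tools emit 'Language: ' with an empty value instead of omitting the
# header; with the fix this is treated as "no locale set" rather than being
# handed to the locale parser as an empty identifier.
catalog.mime_headers = [('Language', '')]
print(catalog.locale)  # None
```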
py2docfx/venv/0/Lib/site-packages/babel/messages/checkers.py
CHANGED
```diff
@@ -155,16 +155,11 @@ def _validate_format(format: str, alternative: str) -> None:
 
 
 def _find_checkers() -> list[Callable[[Catalog | None, Message], object]]:
+    from babel.messages._compat import find_entrypoints
     checkers: list[Callable[[Catalog | None, Message], object]] = []
-    try:
-        from pkg_resources import working_set
-    except ImportError:
-        pass
-    else:
-        for entry_point in working_set.iter_entry_points('babel.checkers'):
-            checkers.append(entry_point.load())
+    checkers.extend(load() for (name, load) in find_entrypoints('babel.checkers'))
     if len(checkers) == 0:
-        # if pkg_resources is not available or no usable egg-info was found
+        # if entrypoints are not available or no usable egg-info was found
         # (see #230), just resort to hard-coded checkers
         return [num_plurals, python_format]
     return checkers
```
py2docfx/venv/0/Lib/site-packages/babel/messages/extract.py
CHANGED
```diff
@@ -30,11 +30,13 @@ from collections.abc import (
     Mapping,
     MutableSequence,
 )
+from functools import lru_cache
 from os.path import relpath
 from textwrap import dedent
 from tokenize import COMMENT, NAME, OP, STRING, generate_tokens
 from typing import TYPE_CHECKING, Any
 
+from babel.messages._compat import find_entrypoints
 from babel.util import parse_encoding, parse_future_flags, pathmatch
 
 if TYPE_CHECKING:
@@ -363,6 +365,14 @@ def _match_messages_against_spec(lineno: int, messages: list[str|None], comments
     return lineno, translatable, comments, context
 
 
+@lru_cache(maxsize=None)
+def _find_extractor(name: str):
+    for ep_name, load in find_entrypoints(GROUP_NAME):
+        if ep_name == name:
+            return load()
+    return None
+
+
 def extract(
     method: _ExtractionMethod,
     fileobj: _FileObj,
@@ -421,25 +431,11 @@ def extract(
         module, attrname = method.split(':', 1)
         func = getattr(__import__(module, {}, {}, [attrname]), attrname)
     else:
-        try:
-            from pkg_resources import working_set
-        except ImportError:
-            pass
-        else:
-            for entry_point in working_set.iter_entry_points(GROUP_NAME,
-                                                             method):
-                func = entry_point.load(require=True)
-                break
+        func = _find_extractor(method)
        if func is None:
-            # if pkg_resources is not available or no usable egg-info was found
-            # (see #230), just resort to looking up the builtin extractors
-            # directly
-            builtin = {
-                'ignore': extract_nothing,
-                'python': extract_python,
-                'javascript': extract_javascript,
-            }
-            func = builtin.get(method)
+            # if no named entry point was found,
+            # we resort to looking up a builtin extractor
+            func = _BUILTIN_EXTRACTORS.get(method)
 
     if func is None:
         raise ValueError(f"Unknown extraction method {method!r}")
@@ -640,13 +636,11 @@ def _parse_python_string(value: str, encoding: str, future_flags: int) -> str |
     )
     if isinstance(code, ast.Expression):
         body = code.body
-        if isinstance(body, ast.Str):
-            return body.s
+        if isinstance(body, ast.Constant):
+            return body.value
         if isinstance(body, ast.JoinedStr):  # f-string
-            if all(isinstance(node, ast.Str) for node in body.values):
-                return ''.join(node.s for node in body.values)
             if all(isinstance(node, ast.Constant) for node in body.values):
-                return ''.join(str(node.value) for node in body.values)
+                return ''.join(node.value for node in body.values)
     # TODO: we could raise an error or warning when not all nodes are constants
     return None
 
@@ -840,3 +834,10 @@ def parse_template_string(
             lineno += len(line_re.findall(expression_contents))
             expression_contents = ''
         prev_character = character
+
+
+_BUILTIN_EXTRACTORS = {
+    'ignore': extract_nothing,
+    'python': extract_python,
+    'javascript': extract_javascript,
+}
```
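Taken together, the extract.py changes replace the inline `pkg_resources` scan with a cached `_find_extractor` lookup and hoist the builtin table to module level, so the resolution order is: explicit `module:attr` string, then named entry point, then `_BUILTIN_EXTRACTORS`. A sketch of the caller-visible behavior (the in-memory source is invented for illustration):

```python
from io import BytesIO
from babel.messages.extract import extract

source = BytesIO(b"from gettext import gettext as _\nprint(_('hello world'))\n")
# 'python' resolves via _BUILTIN_EXTRACTORS when no entry point overrides it.
for lineno, message, comments, context in extract('python', source):
    print(lineno, message)  # 2 hello world
```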