py2docfx 0.1.10.dev1818319__py3-none-any.whl → 0.1.10.dev1824234__py3-none-any.whl

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
Files changed (41)
  1. py2docfx/__main__.py +14 -9
  2. py2docfx/convert_prepare/environment.py +6 -4
  3. py2docfx/convert_prepare/generate_document.py +13 -1
  4. py2docfx/convert_prepare/get_source.py +19 -4
  5. py2docfx/convert_prepare/package_info.py +1 -1
  6. py2docfx/convert_prepare/pip_utils.py +0 -1
  7. py2docfx/convert_prepare/sphinx_caller.py +4 -0
  8. py2docfx/convert_prepare/tests/test_package_info.py +22 -7
  9. py2docfx/convert_prepare/tests/test_params.py +0 -5
  10. py2docfx/venv/0/Lib/site-packages/babel/__init__.py +1 -1
  11. py2docfx/venv/0/Lib/site-packages/babel/core.py +6 -2
  12. py2docfx/venv/0/Lib/site-packages/babel/dates.py +6 -1
  13. py2docfx/venv/0/Lib/site-packages/babel/lists.py +40 -11
  14. py2docfx/venv/0/Lib/site-packages/babel/localedata.py +26 -2
  15. py2docfx/venv/0/Lib/site-packages/babel/localtime/_helpers.py +14 -0
  16. py2docfx/venv/0/Lib/site-packages/babel/messages/_compat.py +34 -0
  17. py2docfx/venv/0/Lib/site-packages/babel/messages/catalog.py +5 -1
  18. py2docfx/venv/0/Lib/site-packages/babel/messages/checkers.py +3 -8
  19. py2docfx/venv/0/Lib/site-packages/babel/messages/extract.py +24 -23
  20. py2docfx/venv/0/Lib/site-packages/babel/messages/frontend.py +122 -48
  21. py2docfx/venv/0/Lib/site-packages/babel/plural.py +1 -2
  22. py2docfx/venv/0/Lib/site-packages/babel/support.py +6 -4
  23. py2docfx/venv/0/Lib/site-packages/yaml/__init__.py +1 -1
  24. py2docfx/venv/template/Lib/site-packages/babel/__init__.py +1 -1
  25. py2docfx/venv/template/Lib/site-packages/babel/core.py +6 -2
  26. py2docfx/venv/template/Lib/site-packages/babel/dates.py +6 -1
  27. py2docfx/venv/template/Lib/site-packages/babel/lists.py +40 -11
  28. py2docfx/venv/template/Lib/site-packages/babel/localedata.py +26 -2
  29. py2docfx/venv/template/Lib/site-packages/babel/localtime/_helpers.py +14 -0
  30. py2docfx/venv/template/Lib/site-packages/babel/messages/_compat.py +34 -0
  31. py2docfx/venv/template/Lib/site-packages/babel/messages/catalog.py +5 -1
  32. py2docfx/venv/template/Lib/site-packages/babel/messages/checkers.py +3 -8
  33. py2docfx/venv/template/Lib/site-packages/babel/messages/extract.py +24 -23
  34. py2docfx/venv/template/Lib/site-packages/babel/messages/frontend.py +122 -48
  35. py2docfx/venv/template/Lib/site-packages/babel/plural.py +1 -2
  36. py2docfx/venv/template/Lib/site-packages/babel/support.py +6 -4
  37. py2docfx/venv/template/Lib/site-packages/yaml/__init__.py +1 -1
  38. {py2docfx-0.1.10.dev1818319.dist-info → py2docfx-0.1.10.dev1824234.dist-info}/METADATA +1 -1
  39. {py2docfx-0.1.10.dev1818319.dist-info → py2docfx-0.1.10.dev1824234.dist-info}/RECORD +41 -39
  40. {py2docfx-0.1.10.dev1818319.dist-info → py2docfx-0.1.10.dev1824234.dist-info}/WHEEL +0 -0
  41. {py2docfx-0.1.10.dev1818319.dist-info → py2docfx-0.1.10.dev1824234.dist-info}/top_level.txt +0 -0
@@ -30,11 +30,13 @@ from collections.abc import (
     Mapping,
     MutableSequence,
 )
+from functools import lru_cache
 from os.path import relpath
 from textwrap import dedent
 from tokenize import COMMENT, NAME, OP, STRING, generate_tokens
 from typing import TYPE_CHECKING, Any

+from babel.messages._compat import find_entrypoints
 from babel.util import parse_encoding, parse_future_flags, pathmatch

 if TYPE_CHECKING:
@@ -363,6 +365,14 @@ def _match_messages_against_spec(lineno: int, messages: list[str|None], comments
     return lineno, translatable, comments, context


+@lru_cache(maxsize=None)
+def _find_extractor(name: str):
+    for ep_name, load in find_entrypoints(GROUP_NAME):
+        if ep_name == name:
+            return load()
+    return None
+
+
 def extract(
     method: _ExtractionMethod,
     fileobj: _FileObj,
@@ -421,25 +431,11 @@ def extract(
             module, attrname = method.split(':', 1)
         func = getattr(__import__(module, {}, {}, [attrname]), attrname)
     else:
-        try:
-            from pkg_resources import working_set
-        except ImportError:
-            pass
-        else:
-            for entry_point in working_set.iter_entry_points(GROUP_NAME,
-                                                             method):
-                func = entry_point.load(require=True)
-                break
+        func = _find_extractor(method)
         if func is None:
-            # if pkg_resources is not available or no usable egg-info was found
-            # (see #230), we resort to looking up the builtin extractors
-            # directly
-            builtin = {
-                'ignore': extract_nothing,
-                'python': extract_python,
-                'javascript': extract_javascript,
-            }
-            func = builtin.get(method)
+            # if no named entry point was found,
+            # we resort to looking up a builtin extractor
+            func = _BUILTIN_EXTRACTORS.get(method)

     if func is None:
         raise ValueError(f"Unknown extraction method {method!r}")
@@ -640,13 +636,11 @@ def _parse_python_string(value: str, encoding: str, future_flags: int) -> str |
     )
     if isinstance(code, ast.Expression):
         body = code.body
-        if isinstance(body, ast.Str):
-            return body.s
+        if isinstance(body, ast.Constant):
+            return body.value
         if isinstance(body, ast.JoinedStr):  # f-string
-            if all(isinstance(node, ast.Str) for node in body.values):
-                return ''.join(node.s for node in body.values)
             if all(isinstance(node, ast.Constant) for node in body.values):
-                return ''.join(str(node.value) for node in body.values)
+                return ''.join(node.value for node in body.values)
             # TODO: we could raise an error or warning when not all nodes are constants
     return None

@@ -840,3 +834,10 @@ def parse_template_string(
                     lineno += len(line_re.findall(expression_contents))
                     expression_contents = ''
         prev_character = character
+
+
+_BUILTIN_EXTRACTORS = {
+    'ignore': extract_nothing,
+    'python': extract_python,
+    'javascript': extract_javascript,
+}
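
The extract.py hunks above replace the per-call pkg_resources scan with a cached entry-point lookup (`_find_extractor`, backed by `find_entrypoints`) plus the module-level `_BUILTIN_EXTRACTORS` table. The public `extract()` call is unchanged for the built-in method names; a minimal sketch of that path (the sample source string is illustrative only):

    from io import BytesIO
    from babel.messages.extract import extract

    # 'python' is resolved through _BUILTIN_EXTRACTORS when no entry point overrides it.
    source = BytesIO(b"print(_('Hello, world!'))")
    for lineno, message, comments, context in extract('python', source):
        print(lineno, message)  # expected: 1 Hello, world!
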
@@ -19,10 +19,11 @@ import re
 import shutil
 import sys
 import tempfile
+import warnings
 from collections import OrderedDict
 from configparser import RawConfigParser
 from io import StringIO
-from typing import Iterable
+from typing import BinaryIO, Iterable, Literal

 from babel import Locale, localedata
 from babel import __version__ as VERSION
@@ -53,6 +54,12 @@ class SetupError(BaseError):
     pass


+class ConfigurationError(BaseError):
+    """
+    Raised for errors in configuration files.
+    """
+
+
 def listify_value(arg, split=None):
     """
     Make a list out of an argument.
@@ -458,7 +465,7 @@ class ExtractMessages(CommandMixin):

        ignore_dirs = listify_value(self.ignore_dirs)
        if ignore_dirs:
-            self.directory_filter = _make_directory_filter(self.ignore_dirs)
+            self.directory_filter = _make_directory_filter(ignore_dirs)
        else:
            self.directory_filter = None

@@ -534,8 +541,21 @@ class ExtractMessages(CommandMixin):
        mappings = []

        if self.mapping_file:
-            with open(self.mapping_file) as fileobj:
-                method_map, options_map = parse_mapping(fileobj)
+            if self.mapping_file.endswith(".toml"):
+                with open(self.mapping_file, "rb") as fileobj:
+                    file_style = (
+                        "pyproject.toml"
+                        if os.path.basename(self.mapping_file) == "pyproject.toml"
+                        else "standalone"
+                    )
+                    method_map, options_map = _parse_mapping_toml(
+                        fileobj,
+                        filename=self.mapping_file,
+                        style=file_style,
+                    )
+            else:
+                with open(self.mapping_file) as fileobj:
+                    method_map, options_map = parse_mapping_cfg(fileobj, filename=self.mapping_file)
            for path in self.input_paths:
                mappings.append((path, method_map, options_map))

@@ -543,7 +563,7 @@ class ExtractMessages(CommandMixin):
            message_extractors = self.distribution.message_extractors
            for path, mapping in message_extractors.items():
                if isinstance(mapping, str):
-                    method_map, options_map = parse_mapping(StringIO(mapping))
+                    method_map, options_map = parse_mapping_cfg(StringIO(mapping))
                else:
                    method_map, options_map = [], {}
                    for pattern, method, options in mapping:
@@ -980,53 +1000,19 @@ def main():


 def parse_mapping(fileobj, filename=None):
-    """Parse an extraction method mapping from a file-like object.
+    warnings.warn(
+        "parse_mapping is deprecated, use parse_mapping_cfg instead",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+    return parse_mapping_cfg(fileobj, filename)

-    >>> buf = StringIO('''
-    ... [extractors]
-    ... custom = mypackage.module:myfunc
-    ...
-    ... # Python source files
-    ... [python: **.py]
-    ...
-    ... # Genshi templates
-    ... [genshi: **/templates/**.html]
-    ... include_attrs =
-    ... [genshi: **/templates/**.txt]
-    ... template_class = genshi.template:TextTemplate
-    ... encoding = latin-1
-    ...
-    ... # Some custom extractor
-    ... [custom: **/custom/*.*]
-    ... ''')
-
-    >>> method_map, options_map = parse_mapping(buf)
-    >>> len(method_map)
-    4
-
-    >>> method_map[0]
-    ('**.py', 'python')
-    >>> options_map['**.py']
-    {}
-    >>> method_map[1]
-    ('**/templates/**.html', 'genshi')
-    >>> options_map['**/templates/**.html']['include_attrs']
-    ''
-    >>> method_map[2]
-    ('**/templates/**.txt', 'genshi')
-    >>> options_map['**/templates/**.txt']['template_class']
-    'genshi.template:TextTemplate'
-    >>> options_map['**/templates/**.txt']['encoding']
-    'latin-1'
-
-    >>> method_map[3]
-    ('**/custom/*.*', 'mypackage.module:myfunc')
-    >>> options_map['**/custom/*.*']
-    {}
+
+def parse_mapping_cfg(fileobj, filename=None):
+    """Parse an extraction method mapping from a file-like object.

     :param fileobj: a readable file-like object containing the configuration
                     text to parse
-    :see: `extract_from_directory`
     """
     extractors = {}
     method_map = []
@@ -1053,6 +1039,94 @@ def parse_mapping(fileobj, filename=None):
     return method_map, options_map


+def _parse_config_object(config: dict, *, filename="(unknown)"):
+    extractors = {}
+    method_map = []
+    options_map = {}
+
+    extractors_read = config.get("extractors", {})
+    if not isinstance(extractors_read, dict):
+        raise ConfigurationError(f"{filename}: extractors: Expected a dictionary, got {type(extractors_read)!r}")
+    for method, callable_spec in extractors_read.items():
+        if not isinstance(method, str):
+            # Impossible via TOML, but could happen with a custom object.
+            raise ConfigurationError(f"{filename}: extractors: Extraction method must be a string, got {method!r}")
+        if not isinstance(callable_spec, str):
+            raise ConfigurationError(f"{filename}: extractors: Callable specification must be a string, got {callable_spec!r}")
+        extractors[method] = callable_spec
+
+    if "mapping" in config:
+        raise ConfigurationError(f"{filename}: 'mapping' is not a valid key, did you mean 'mappings'?")
+
+    mappings_read = config.get("mappings", [])
+    if not isinstance(mappings_read, list):
+        raise ConfigurationError(f"{filename}: mappings: Expected a list, got {type(mappings_read)!r}")
+    for idx, entry in enumerate(mappings_read):
+        if not isinstance(entry, dict):
+            raise ConfigurationError(f"{filename}: mappings[{idx}]: Expected a dictionary, got {type(entry)!r}")
+        entry = entry.copy()
+
+        method = entry.pop("method", None)
+        if not isinstance(method, str):
+            raise ConfigurationError(f"{filename}: mappings[{idx}]: 'method' must be a string, got {method!r}")
+        method = extractors.get(method, method)  # Map the extractor name to the callable now
+
+        pattern = entry.pop("pattern", None)
+        if not isinstance(pattern, (list, str)):
+            raise ConfigurationError(f"{filename}: mappings[{idx}]: 'pattern' must be a list or a string, got {pattern!r}")
+        if not isinstance(pattern, list):
+            pattern = [pattern]
+
+        for pat in pattern:
+            if not isinstance(pat, str):
+                raise ConfigurationError(f"{filename}: mappings[{idx}]: 'pattern' elements must be strings, got {pat!r}")
+            method_map.append((pat, method))
+            options_map[pat] = entry
+
+    return method_map, options_map
+
+
+def _parse_mapping_toml(
+    fileobj: BinaryIO,
+    filename: str = "(unknown)",
+    style: Literal["standalone", "pyproject.toml"] = "standalone",
+):
+    """Parse an extraction method mapping from a binary file-like object.
+
+    .. warning: As of this version of Babel, this is a private API subject to changes.
+
+    :param fileobj: a readable binary file-like object containing the configuration TOML to parse
+    :param filename: the name of the file being parsed, for error messages
+    :param style: whether the file is in the style of a `pyproject.toml` file, i.e. whether to look for `tool.babel`.
+    """
+    try:
+        import tomllib
+    except ImportError:
+        try:
+            import tomli as tomllib
+        except ImportError as ie:  # pragma: no cover
+            raise ImportError("tomli or tomllib is required to parse TOML files") from ie
+
+    try:
+        parsed_data = tomllib.load(fileobj)
+    except tomllib.TOMLDecodeError as e:
+        raise ConfigurationError(f"{filename}: Error parsing TOML file: {e}") from e
+
+    if style == "pyproject.toml":
+        try:
+            babel_data = parsed_data["tool"]["babel"]
+        except (TypeError, KeyError) as e:
+            raise ConfigurationError(f"{filename}: No 'tool.babel' section found in file") from e
+    elif style == "standalone":
+        babel_data = parsed_data
+        if "babel" in babel_data:
+            raise ConfigurationError(f"{filename}: 'babel' should not be present in a stand-alone configuration file")
+    else:  # pragma: no cover
+        raise ValueError(f"Unknown TOML style {style!r}")
+
+    return _parse_config_object(babel_data, filename=filename)
+
+
 def _parse_spec(s: str) -> tuple[int | None, tuple[int | tuple[int, str], ...]]:
     inds = []
     number = None
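
For reference, `_parse_mapping_toml` above accepts the same shape that `_parse_config_object` validates: an optional `extractors` table plus a `mappings` array. A hedged sketch of that private API (the mapping contents and the filename are made up for illustration):

    from io import BytesIO
    from babel.messages.frontend import _parse_mapping_toml  # private API per the warning above

    toml_bytes = b'''
    [extractors]
    custom = "mypackage.module:myfunc"

    [[mappings]]
    method = "python"
    pattern = "**.py"

    [[mappings]]
    method = "custom"
    pattern = "**/templates/**.html"
    encoding = "latin-1"
    '''

    method_map, options_map = _parse_mapping_toml(BytesIO(toml_bytes), filename="babel-mapping.toml")
    print(method_map)   # [('**.py', 'python'), ('**/templates/**.html', 'mypackage.module:myfunc')]
    print(options_map)  # {'**.py': {}, '**/templates/**.html': {'encoding': 'latin-1'}}
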
@@ -360,8 +360,7 @@ def tokenize_rule(s: str) -> list[tuple[str, str]]:
                    result.append((tok, match.group()))
                break
        else:
-            raise RuleError('malformed CLDR pluralization rule. '
-                            'Got unexpected %r' % s[pos])
+            raise RuleError(f"malformed CLDR pluralization rule. Got unexpected {s[pos]!r}")
    return result[::-1]


@@ -466,10 +466,12 @@ class NullTranslations(gettext.NullTranslations):
        missing = object()
        tmsg = self._catalog.get(ctxt_msg_id, missing)
        if tmsg is missing:
-            if self._fallback:
-                return self._fallback.pgettext(context, message)
-            return message
-        return tmsg
+            tmsg = self._catalog.get((ctxt_msg_id, self.plural(1)), missing)
+        if tmsg is not missing:
+            return tmsg
+        if self._fallback:
+            return self._fallback.pgettext(context, message)
+        return message

    def lpgettext(self, context: str, message: str) -> str | bytes | object:
        """Equivalent to ``pgettext()``, but the translation is returned in the
@@ -8,7 +8,7 @@ from .nodes import *
from .loader import *
from .dumper import *

-__version__ = '6.0.1'
+__version__ = '6.0.2'
try:
    from .cyaml import *
    __with_libyaml__ = True
@@ -25,7 +25,7 @@ from babel.core import (
    parse_locale,
)

-__version__ = '2.15.0'
+__version__ = '2.16.0'

__all__ = [
    'Locale',
@@ -201,7 +201,11 @@ class Locale:

        identifier = str(self)
        identifier_without_modifier = identifier.partition('@')[0]
-        if not localedata.exists(identifier_without_modifier):
+        if localedata.exists(identifier):
+            self.__data_identifier = identifier
+        elif localedata.exists(identifier_without_modifier):
+            self.__data_identifier = identifier_without_modifier
+        else:
            raise UnknownLocaleError(identifier)

    @classmethod
@@ -436,7 +440,7 @@ class Locale:
    @property
    def _data(self) -> localedata.LocaleDataDict:
        if self.__data is None:
-            self.__data = localedata.LocaleDataDict(localedata.load(str(self)))
+            self.__data = localedata.LocaleDataDict(localedata.load(self.__data_identifier))
        return self.__data

    def get_display_name(self, locale: Locale | str | None = None) -> str | None:
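
The two core.py hunks above make `Locale` remember which data file to load when the identifier carries an `@modifier`: the modifier-specific file if it ships with the bundled CLDR data, otherwise the base identifier. A rough sketch of that selection (whether a modifier-specific file exists depends on the data shipped with this Babel version):

    from babel import Locale, localedata

    ident = "ca_ES@valencia"            # example identifier with a modifier
    loc = Locale.parse(ident)
    base = ident.partition("@")[0]
    # Mirrors the new choice made in Locale.__init__:
    data_id = ident if localedata.exists(ident) else base
    print(loc, "->", data_id)
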
@@ -826,6 +826,10 @@ def format_skeleton(
    Traceback (most recent call last):
        ...
    KeyError: yMMd
+    >>> format_skeleton('GH', t, fuzzy=True, locale='fi_FI')  # GH is not in the Finnish locale and there is no close match, an error is thrown
+    Traceback (most recent call last):
+        ...
+    KeyError: None

    After the skeleton is resolved to a pattern `format_datetime` is called so
    all timezone processing etc is the same as for that.
@@ -835,7 +839,8 @@
                     time in UTC is used
    :param tzinfo: the time-zone to apply to the time for display
    :param fuzzy: If the skeleton is not found, allow choosing a skeleton that's
-                  close enough to it.
+                  close enough to it. If there is no close match, a `KeyError`
+                  is thrown.
    :param locale: a `Locale` object or a locale identifier
    """
    locale = Locale.parse(locale)
@@ -26,9 +26,11 @@ if TYPE_CHECKING:
DEFAULT_LOCALE = default_locale()


-def format_list(lst: Sequence[str],
-                style: Literal['standard', 'standard-short', 'or', 'or-short', 'unit', 'unit-short', 'unit-narrow'] = 'standard',
-                locale: Locale | str | None = DEFAULT_LOCALE) -> str:
+def format_list(
+    lst: Sequence[str],
+    style: Literal['standard', 'standard-short', 'or', 'or-short', 'unit', 'unit-short', 'unit-narrow'] = 'standard',
+    locale: Locale | str | None = DEFAULT_LOCALE,
+) -> str:
    """
    Format the items in `lst` as a list.

@@ -39,7 +41,11 @@ def format_list(lst: Sequence[str],
    >>> format_list(['omena', 'peruna', 'aplari'], style='or', locale='fi')
    u'omena, peruna tai aplari'

-    These styles are defined, but not all are necessarily available in all locales.
+    Not all styles are necessarily available in all locales.
+    The function will attempt to fall back to replacement styles according to the rules
+    set forth in the CLDR root XML file, and raise a ValueError if no suitable replacement
+    can be found.
+
    The following text is verbatim from the Unicode TR35-49 spec [1].

    * standard:
@@ -76,14 +82,9 @@ def format_list(lst: Sequence[str],
    if len(lst) == 1:
        return lst[0]

-    if style not in locale.list_patterns:
-        raise ValueError(
-            f'Locale {locale} does not support list formatting style {style!r} '
-            f'(supported are {sorted(locale.list_patterns)})',
-        )
-    patterns = locale.list_patterns[style]
+    patterns = _resolve_list_style(locale, style)

-    if len(lst) == 2:
+    if len(lst) == 2 and '2' in patterns:
        return patterns['2'].format(*lst)

    result = patterns['start'].format(lst[0], lst[1])
@@ -92,3 +93,31 @@ def format_list(lst: Sequence[str],
        result = patterns['end'].format(result, lst[-1])

    return result
+
+
+# Based on CLDR 45's root.xml file's `<alias>`es.
+# The root file defines both `standard` and `or`,
+# so they're always available.
+# TODO: It would likely be better to use the
+#       babel.localedata.Alias mechanism for this,
+#       but I'm not quite sure how it's supposed to
+#       work with inheritance and data in the root.
+_style_fallbacks = {
+    "or-narrow": ["or-short", "or"],
+    "or-short": ["or"],
+    "standard-narrow": ["standard-short", "standard"],
+    "standard-short": ["standard"],
+    "unit": ["unit-short", "standard"],
+    "unit-narrow": ["unit-short", "unit", "standard"],
+    "unit-short": ["standard"],
+}
+
+
+def _resolve_list_style(locale: Locale, style: str):
+    for style in (style, *(_style_fallbacks.get(style, []))):  # noqa: B020
+        if style in locale.list_patterns:
+            return locale.list_patterns[style]
+    raise ValueError(
+        f"Locale {locale} does not support list formatting style {style!r} "
+        f"(supported are {sorted(locale.list_patterns)})",
+    )
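
With `_resolve_list_style` above, `format_list` degrades through the CLDR-root fallback chain instead of failing as soon as a narrow/short variant is missing from a locale's data; a genuinely unknown style still raises. A short sketch against the public API (the list items are arbitrary):

    from babel.lists import format_list

    # 'standard-narrow' may be absent from a locale's data; it now falls back
    # to 'standard-short' and then 'standard' rather than raising.
    print(format_list(["red", "green", "blue"], style="standard-narrow", locale="en"))

    try:
        format_list(["a", "b"], style="no-such-style", locale="en")
    except ValueError as exc:
        print(exc)  # lists the styles the locale does support
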
@@ -95,6 +95,27 @@ def locale_identifiers() -> list[str]:
    ]


+def _is_non_likely_script(name: str) -> bool:
+    """Return whether the locale is of the form ``lang_Script``,
+    and the script is not the likely script for the language.
+
+    This implements the behavior of the ``nonlikelyScript`` value of the
+    ``localRules`` attribute for parent locales added in CLDR 45.
+    """
+    from babel.core import get_global, parse_locale
+
+    try:
+        lang, territory, script, variant, *rest = parse_locale(name)
+    except ValueError:
+        return False
+
+    if lang and script and not territory and not variant and not rest:
+        likely_subtag = get_global('likely_subtags').get(lang)
+        _, _, likely_script, *_ = parse_locale(likely_subtag)
+        return script != likely_script
+    return False
+
+
def load(name: os.PathLike[str] | str, merge_inherited: bool = True) -> dict[str, Any]:
    """Load the locale data for the given locale.

@@ -132,8 +153,11 @@ def load(name: os.PathLike[str] | str, merge_inherited: bool = True) -> dict[str
                from babel.core import get_global
                parent = get_global('parent_exceptions').get(name)
                if not parent:
-                    parts = name.split('_')
-                    parent = "root" if len(parts) == 1 else "_".join(parts[:-1])
+                    if _is_non_likely_script(name):
+                        parent = 'root'
+                    else:
+                        parts = name.split('_')
+                        parent = "root" if len(parts) == 1 else "_".join(parts[:-1])
                data = load(parent).copy()
            filename = resolve_locale_filename(name)
            with open(filename, 'rb') as fileobj:
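
The localedata hunks above route `lang_Script` locales whose script is not the language's likely script straight to `root` when resolving a parent (CLDR 45's `nonlikelyScript` rule). A small sketch of the helper's behaviour (expected values depend on the likely-subtags data bundled with Babel):

    from babel import localedata

    # Serbian's likely script is Cyrillic, so sr_Latn counts as a non-likely script locale.
    print(localedata._is_non_likely_script("sr_Latn"))     # True  -> parent becomes 'root'
    print(localedata._is_non_likely_script("sr_Cyrl"))     # False -> normal parent chain
    print(localedata._is_non_likely_script("sr_Latn_RS"))  # False (a territory is present)
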
@@ -2,7 +2,11 @@ try:
    import pytz
except ModuleNotFoundError:
    pytz = None
+
+try:
    import zoneinfo
+except ModuleNotFoundError:
+    zoneinfo = None


def _get_tzinfo(tzenv: str):
@@ -19,6 +23,16 @@ def _get_tzinfo(tzenv: str):
    else:
        try:
            return zoneinfo.ZoneInfo(tzenv)
+        except ValueError as ve:
+            # This is somewhat hacky, but since _validate_tzfile_path() doesn't
+            # raise a specific error type, we'll need to check the message to be
+            # one we know to be from that function.
+            # If so, we pretend it meant that the TZ didn't exist, for the benefit
+            # of `babel.localtime` catching the `LookupError` raised by
+            # `_get_tzinfo_or_raise()`.
+            # See https://github.com/python-babel/babel/issues/1092
+            if str(ve).startswith("ZoneInfo keys "):
+                return None
        except zoneinfo.ZoneInfoNotFoundError:
            pass

@@ -0,0 +1,34 @@
+import sys
+from functools import partial
+
+
+def find_entrypoints(group_name: str):
+    """
+    Find entrypoints of a given group using either `importlib.metadata` or the
+    older `pkg_resources` mechanism.
+
+    Yields tuples of the entrypoint name and a callable function that will
+    load the actual entrypoint.
+    """
+    if sys.version_info >= (3, 10):
+        # "Changed in version 3.10: importlib.metadata is no longer provisional."
+        try:
+            from importlib.metadata import entry_points
+        except ImportError:
+            pass
+        else:
+            eps = entry_points(group=group_name)
+            # Only do this if this implementation of `importlib.metadata` is
+            # modern enough to not return a dict.
+            if not isinstance(eps, dict):
+                for entry_point in eps:
+                    yield (entry_point.name, entry_point.load)
+                return
+
+    try:
+        from pkg_resources import working_set
+    except ImportError:
+        pass
+    else:
+        for entry_point in working_set.iter_entry_points(group_name):
+            yield (entry_point.name, partial(entry_point.load, require=True))
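
The new `babel.messages._compat.find_entrypoints` above yields `(name, loader)` pairs from `importlib.metadata` when available and falls back to `pkg_resources` otherwise. A small usage sketch (the group name is the `GROUP_NAME` used by extract.py):

    from babel.messages._compat import find_entrypoints

    for name, load in find_entrypoints("babel.extractors"):
        extractor = load()  # resolves the entry point lazily
        print(name, extractor)
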
@@ -479,7 +479,11 @@ class Catalog:
                self.last_translator = value
            elif name == 'language':
                value = value.replace('-', '_')
-                self._set_locale(value)
+                # The `or None` makes sure that the locale is set to None
+                # if the header's value is an empty string, which is what
+                # some tools generate (instead of eliding the empty Language
+                # header altogether).
+                self._set_locale(value or None)
            elif name == 'language-team':
                self.language_team = value
            elif name == 'content-type':
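
The catalog.py hunk above treats an empty `Language` header, which some tools emit, as no locale at all. A minimal sketch (the header list is contrived):

    from babel.messages.catalog import Catalog

    cat = Catalog(locale="fi")
    cat.mime_headers = [("Language", "")]   # empty header instead of an omitted one
    print(cat.locale)                       # None: the empty value now clears the locale
                                            # rather than erroring while parsing ''
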
@@ -155,16 +155,11 @@ def _validate_format(format: str, alternative: str) -> None:


def _find_checkers() -> list[Callable[[Catalog | None, Message], object]]:
+    from babel.messages._compat import find_entrypoints
    checkers: list[Callable[[Catalog | None, Message], object]] = []
-    try:
-        from pkg_resources import working_set
-    except ImportError:
-        pass
-    else:
-        for entry_point in working_set.iter_entry_points('babel.checkers'):
-            checkers.append(entry_point.load())
+    checkers.extend(load() for (name, load) in find_entrypoints('babel.checkers'))
    if len(checkers) == 0:
-        # if pkg_resources is not available or no usable egg-info was found
+        # if entrypoints are not available or no usable egg-info was found
        # (see #230), just resort to hard-coded checkers
        return [num_plurals, python_format]
    return checkers
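
Finally, the checkers.py hunk swaps the inline pkg_resources scan for the shared `find_entrypoints` helper; with no `babel.checkers` entry points installed, the hard-coded pair is still returned. A quick sketch calling the private helper directly:

    from babel.messages.checkers import _find_checkers

    for checker in _find_checkers():
        print(checker.__name__)  # with no plugins installed: num_plurals, python_format
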