android-notify 1.60.0__py3-none-any.whl → 1.60.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of android-notify might be problematic. Click here for more details.

Files changed (375) hide show
  1. android_notify/config.py +1 -1
  2. {android_notify-1.60.0.dist-info → android_notify-1.60.2.dist-info}/METADATA +4 -4
  3. {android_notify-1.60.0.dist-info → android_notify-1.60.2.dist-info}/RECORD +375 -6
  4. venv/Lib/site-packages/_distutils_hack/__init__.py +239 -0
  5. venv/Lib/site-packages/_distutils_hack/override.py +1 -0
  6. venv/Lib/site-packages/pkg_resources/__init__.py +3713 -0
  7. venv/Lib/site-packages/pkg_resources/py.typed +0 -0
  8. venv/Lib/site-packages/pkg_resources/tests/__init__.py +0 -0
  9. venv/Lib/site-packages/pkg_resources/tests/data/my-test-package-source/setup.py +7 -0
  10. venv/Lib/site-packages/pkg_resources/tests/test_find_distributions.py +56 -0
  11. venv/Lib/site-packages/pkg_resources/tests/test_integration_zope_interface.py +54 -0
  12. venv/Lib/site-packages/pkg_resources/tests/test_markers.py +8 -0
  13. venv/Lib/site-packages/pkg_resources/tests/test_pkg_resources.py +485 -0
  14. venv/Lib/site-packages/pkg_resources/tests/test_resources.py +869 -0
  15. venv/Lib/site-packages/pkg_resources/tests/test_working_set.py +505 -0
  16. venv/Lib/site-packages/setuptools/__init__.py +248 -0
  17. venv/Lib/site-packages/setuptools/_core_metadata.py +337 -0
  18. venv/Lib/site-packages/setuptools/_discovery.py +33 -0
  19. venv/Lib/site-packages/setuptools/_distutils/__init__.py +14 -0
  20. venv/Lib/site-packages/setuptools/_distutils/_log.py +3 -0
  21. venv/Lib/site-packages/setuptools/_distutils/_macos_compat.py +12 -0
  22. venv/Lib/site-packages/setuptools/_distutils/_modified.py +95 -0
  23. venv/Lib/site-packages/setuptools/_distutils/_msvccompiler.py +16 -0
  24. venv/Lib/site-packages/setuptools/_distutils/archive_util.py +294 -0
  25. venv/Lib/site-packages/setuptools/_distutils/ccompiler.py +26 -0
  26. venv/Lib/site-packages/setuptools/_distutils/cmd.py +554 -0
  27. venv/Lib/site-packages/setuptools/_distutils/command/__init__.py +23 -0
  28. venv/Lib/site-packages/setuptools/_distutils/command/_framework_compat.py +54 -0
  29. venv/Lib/site-packages/setuptools/_distutils/command/bdist.py +167 -0
  30. venv/Lib/site-packages/setuptools/_distutils/command/bdist_dumb.py +141 -0
  31. venv/Lib/site-packages/setuptools/_distutils/command/bdist_rpm.py +598 -0
  32. venv/Lib/site-packages/setuptools/_distutils/command/build.py +156 -0
  33. venv/Lib/site-packages/setuptools/_distutils/command/build_clib.py +201 -0
  34. venv/Lib/site-packages/setuptools/_distutils/command/build_ext.py +812 -0
  35. venv/Lib/site-packages/setuptools/_distutils/command/build_py.py +407 -0
  36. venv/Lib/site-packages/setuptools/_distutils/command/build_scripts.py +160 -0
  37. venv/Lib/site-packages/setuptools/_distutils/command/check.py +152 -0
  38. venv/Lib/site-packages/setuptools/_distutils/command/clean.py +77 -0
  39. venv/Lib/site-packages/setuptools/_distutils/command/config.py +358 -0
  40. venv/Lib/site-packages/setuptools/_distutils/command/install.py +805 -0
  41. venv/Lib/site-packages/setuptools/_distutils/command/install_data.py +94 -0
  42. venv/Lib/site-packages/setuptools/_distutils/command/install_egg_info.py +91 -0
  43. venv/Lib/site-packages/setuptools/_distutils/command/install_headers.py +46 -0
  44. venv/Lib/site-packages/setuptools/_distutils/command/install_lib.py +238 -0
  45. venv/Lib/site-packages/setuptools/_distutils/command/install_scripts.py +62 -0
  46. venv/Lib/site-packages/setuptools/_distutils/command/sdist.py +521 -0
  47. venv/Lib/site-packages/setuptools/_distutils/compat/__init__.py +18 -0
  48. venv/Lib/site-packages/setuptools/_distutils/compat/numpy.py +2 -0
  49. venv/Lib/site-packages/setuptools/_distutils/compat/py39.py +66 -0
  50. venv/Lib/site-packages/setuptools/_distutils/compilers/C/base.py +1394 -0
  51. venv/Lib/site-packages/setuptools/_distutils/compilers/C/cygwin.py +340 -0
  52. venv/Lib/site-packages/setuptools/_distutils/compilers/C/errors.py +24 -0
  53. venv/Lib/site-packages/setuptools/_distutils/compilers/C/msvc.py +614 -0
  54. venv/Lib/site-packages/setuptools/_distutils/compilers/C/tests/test_base.py +83 -0
  55. venv/Lib/site-packages/setuptools/_distutils/compilers/C/tests/test_cygwin.py +76 -0
  56. venv/Lib/site-packages/setuptools/_distutils/compilers/C/tests/test_mingw.py +48 -0
  57. venv/Lib/site-packages/setuptools/_distutils/compilers/C/tests/test_msvc.py +136 -0
  58. venv/Lib/site-packages/setuptools/_distutils/compilers/C/tests/test_unix.py +413 -0
  59. venv/Lib/site-packages/setuptools/_distutils/compilers/C/unix.py +422 -0
  60. venv/Lib/site-packages/setuptools/_distutils/compilers/C/zos.py +230 -0
  61. venv/Lib/site-packages/setuptools/_distutils/core.py +289 -0
  62. venv/Lib/site-packages/setuptools/_distutils/cygwinccompiler.py +31 -0
  63. venv/Lib/site-packages/setuptools/_distutils/debug.py +5 -0
  64. venv/Lib/site-packages/setuptools/_distutils/dep_util.py +14 -0
  65. venv/Lib/site-packages/setuptools/_distutils/dir_util.py +244 -0
  66. venv/Lib/site-packages/setuptools/_distutils/dist.py +1386 -0
  67. venv/Lib/site-packages/setuptools/_distutils/errors.py +108 -0
  68. venv/Lib/site-packages/setuptools/_distutils/extension.py +258 -0
  69. venv/Lib/site-packages/setuptools/_distutils/fancy_getopt.py +471 -0
  70. venv/Lib/site-packages/setuptools/_distutils/file_util.py +236 -0
  71. venv/Lib/site-packages/setuptools/_distutils/filelist.py +431 -0
  72. venv/Lib/site-packages/setuptools/_distutils/log.py +56 -0
  73. venv/Lib/site-packages/setuptools/_distutils/spawn.py +134 -0
  74. venv/Lib/site-packages/setuptools/_distutils/sysconfig.py +598 -0
  75. venv/Lib/site-packages/setuptools/_distutils/tests/__init__.py +42 -0
  76. venv/Lib/site-packages/setuptools/_distutils/tests/compat/__init__.py +0 -0
  77. venv/Lib/site-packages/setuptools/_distutils/tests/compat/py39.py +40 -0
  78. venv/Lib/site-packages/setuptools/_distutils/tests/support.py +134 -0
  79. venv/Lib/site-packages/setuptools/_distutils/tests/test_archive_util.py +353 -0
  80. venv/Lib/site-packages/setuptools/_distutils/tests/test_bdist.py +47 -0
  81. venv/Lib/site-packages/setuptools/_distutils/tests/test_bdist_dumb.py +78 -0
  82. venv/Lib/site-packages/setuptools/_distutils/tests/test_bdist_rpm.py +127 -0
  83. venv/Lib/site-packages/setuptools/_distutils/tests/test_build.py +49 -0
  84. venv/Lib/site-packages/setuptools/_distutils/tests/test_build_clib.py +134 -0
  85. venv/Lib/site-packages/setuptools/_distutils/tests/test_build_ext.py +628 -0
  86. venv/Lib/site-packages/setuptools/_distutils/tests/test_build_py.py +196 -0
  87. venv/Lib/site-packages/setuptools/_distutils/tests/test_build_scripts.py +96 -0
  88. venv/Lib/site-packages/setuptools/_distutils/tests/test_check.py +194 -0
  89. venv/Lib/site-packages/setuptools/_distutils/tests/test_clean.py +45 -0
  90. venv/Lib/site-packages/setuptools/_distutils/tests/test_cmd.py +107 -0
  91. venv/Lib/site-packages/setuptools/_distutils/tests/test_config_cmd.py +87 -0
  92. venv/Lib/site-packages/setuptools/_distutils/tests/test_core.py +130 -0
  93. venv/Lib/site-packages/setuptools/_distutils/tests/test_dir_util.py +139 -0
  94. venv/Lib/site-packages/setuptools/_distutils/tests/test_dist.py +552 -0
  95. venv/Lib/site-packages/setuptools/_distutils/tests/test_extension.py +117 -0
  96. venv/Lib/site-packages/setuptools/_distutils/tests/test_file_util.py +95 -0
  97. venv/Lib/site-packages/setuptools/_distutils/tests/test_filelist.py +336 -0
  98. venv/Lib/site-packages/setuptools/_distutils/tests/test_install.py +245 -0
  99. venv/Lib/site-packages/setuptools/_distutils/tests/test_install_data.py +74 -0
  100. venv/Lib/site-packages/setuptools/_distutils/tests/test_install_headers.py +33 -0
  101. venv/Lib/site-packages/setuptools/_distutils/tests/test_install_lib.py +110 -0
  102. venv/Lib/site-packages/setuptools/_distutils/tests/test_install_scripts.py +52 -0
  103. venv/Lib/site-packages/setuptools/_distutils/tests/test_log.py +12 -0
  104. venv/Lib/site-packages/setuptools/_distutils/tests/test_modified.py +126 -0
  105. venv/Lib/site-packages/setuptools/_distutils/tests/test_sdist.py +470 -0
  106. venv/Lib/site-packages/setuptools/_distutils/tests/test_spawn.py +141 -0
  107. venv/Lib/site-packages/setuptools/_distutils/tests/test_sysconfig.py +319 -0
  108. venv/Lib/site-packages/setuptools/_distutils/tests/test_text_file.py +127 -0
  109. venv/Lib/site-packages/setuptools/_distutils/tests/test_util.py +243 -0
  110. venv/Lib/site-packages/setuptools/_distutils/tests/test_version.py +80 -0
  111. venv/Lib/site-packages/setuptools/_distutils/tests/test_versionpredicate.py +0 -0
  112. venv/Lib/site-packages/setuptools/_distutils/tests/unix_compat.py +17 -0
  113. venv/Lib/site-packages/setuptools/_distutils/text_file.py +286 -0
  114. venv/Lib/site-packages/setuptools/_distutils/unixccompiler.py +9 -0
  115. venv/Lib/site-packages/setuptools/_distutils/util.py +518 -0
  116. venv/Lib/site-packages/setuptools/_distutils/version.py +348 -0
  117. venv/Lib/site-packages/setuptools/_distutils/versionpredicate.py +175 -0
  118. venv/Lib/site-packages/setuptools/_distutils/zosccompiler.py +3 -0
  119. venv/Lib/site-packages/setuptools/_entry_points.py +94 -0
  120. venv/Lib/site-packages/setuptools/_imp.py +87 -0
  121. venv/Lib/site-packages/setuptools/_importlib.py +9 -0
  122. venv/Lib/site-packages/setuptools/_itertools.py +23 -0
  123. venv/Lib/site-packages/setuptools/_normalization.py +177 -0
  124. venv/Lib/site-packages/setuptools/_path.py +93 -0
  125. venv/Lib/site-packages/setuptools/_reqs.py +42 -0
  126. venv/Lib/site-packages/setuptools/_scripts.py +361 -0
  127. venv/Lib/site-packages/setuptools/_shutil.py +59 -0
  128. venv/Lib/site-packages/setuptools/_static.py +188 -0
  129. venv/Lib/site-packages/setuptools/_vendor/autocommand/__init__.py +27 -0
  130. venv/Lib/site-packages/setuptools/_vendor/autocommand/autoasync.py +142 -0
  131. venv/Lib/site-packages/setuptools/_vendor/autocommand/autocommand.py +70 -0
  132. venv/Lib/site-packages/setuptools/_vendor/autocommand/automain.py +59 -0
  133. venv/Lib/site-packages/setuptools/_vendor/autocommand/autoparse.py +333 -0
  134. venv/Lib/site-packages/setuptools/_vendor/autocommand/errors.py +23 -0
  135. venv/Lib/site-packages/setuptools/_vendor/backports/__init__.py +1 -0
  136. venv/Lib/site-packages/setuptools/_vendor/backports/tarfile/__init__.py +2937 -0
  137. venv/Lib/site-packages/setuptools/_vendor/backports/tarfile/__main__.py +5 -0
  138. venv/Lib/site-packages/setuptools/_vendor/backports/tarfile/compat/__init__.py +0 -0
  139. venv/Lib/site-packages/setuptools/_vendor/backports/tarfile/compat/py38.py +24 -0
  140. venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/__init__.py +1083 -0
  141. venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/_adapters.py +83 -0
  142. venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/_collections.py +30 -0
  143. venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/_compat.py +57 -0
  144. venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/_functools.py +104 -0
  145. venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/_itertools.py +73 -0
  146. venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/_meta.py +67 -0
  147. venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/_text.py +99 -0
  148. venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/compat/__init__.py +0 -0
  149. venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/compat/py311.py +22 -0
  150. venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/compat/py39.py +36 -0
  151. venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/diagnose.py +21 -0
  152. venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/py.typed +0 -0
  153. venv/Lib/site-packages/setuptools/_vendor/inflect/__init__.py +3986 -0
  154. venv/Lib/site-packages/setuptools/_vendor/inflect/compat/__init__.py +0 -0
  155. venv/Lib/site-packages/setuptools/_vendor/inflect/compat/py38.py +7 -0
  156. venv/Lib/site-packages/setuptools/_vendor/inflect/py.typed +0 -0
  157. venv/Lib/site-packages/setuptools/_vendor/jaraco/collections/__init__.py +1091 -0
  158. venv/Lib/site-packages/setuptools/_vendor/jaraco/collections/py.typed +0 -0
  159. venv/Lib/site-packages/setuptools/_vendor/jaraco/context.py +361 -0
  160. venv/Lib/site-packages/setuptools/_vendor/jaraco/functools/__init__.py +633 -0
  161. venv/Lib/site-packages/setuptools/_vendor/jaraco/functools/__init__.pyi +125 -0
  162. venv/Lib/site-packages/setuptools/_vendor/jaraco/functools/py.typed +0 -0
  163. venv/Lib/site-packages/setuptools/_vendor/jaraco/text/__init__.py +624 -0
  164. venv/Lib/site-packages/setuptools/_vendor/jaraco/text/layouts.py +25 -0
  165. venv/Lib/site-packages/setuptools/_vendor/jaraco/text/show-newlines.py +33 -0
  166. venv/Lib/site-packages/setuptools/_vendor/jaraco/text/strip-prefix.py +21 -0
  167. venv/Lib/site-packages/setuptools/_vendor/jaraco/text/to-dvorak.py +6 -0
  168. venv/Lib/site-packages/setuptools/_vendor/jaraco/text/to-qwerty.py +6 -0
  169. venv/Lib/site-packages/setuptools/_vendor/more_itertools/__init__.py +6 -0
  170. venv/Lib/site-packages/setuptools/_vendor/more_itertools/__init__.pyi +2 -0
  171. venv/Lib/site-packages/setuptools/_vendor/more_itertools/more.py +4806 -0
  172. venv/Lib/site-packages/setuptools/_vendor/more_itertools/more.pyi +709 -0
  173. venv/Lib/site-packages/setuptools/_vendor/more_itertools/py.typed +0 -0
  174. venv/Lib/site-packages/setuptools/_vendor/more_itertools/recipes.py +1046 -0
  175. venv/Lib/site-packages/setuptools/_vendor/more_itertools/recipes.pyi +136 -0
  176. venv/Lib/site-packages/setuptools/_vendor/packaging/__init__.py +15 -0
  177. venv/Lib/site-packages/setuptools/_vendor/packaging/_elffile.py +110 -0
  178. venv/Lib/site-packages/setuptools/_vendor/packaging/_manylinux.py +263 -0
  179. venv/Lib/site-packages/setuptools/_vendor/packaging/_musllinux.py +85 -0
  180. venv/Lib/site-packages/setuptools/_vendor/packaging/_parser.py +354 -0
  181. venv/Lib/site-packages/setuptools/_vendor/packaging/_structures.py +61 -0
  182. venv/Lib/site-packages/setuptools/_vendor/packaging/_tokenizer.py +194 -0
  183. venv/Lib/site-packages/setuptools/_vendor/packaging/licenses/__init__.py +145 -0
  184. venv/Lib/site-packages/setuptools/_vendor/packaging/licenses/_spdx.py +759 -0
  185. venv/Lib/site-packages/setuptools/_vendor/packaging/markers.py +331 -0
  186. venv/Lib/site-packages/setuptools/_vendor/packaging/metadata.py +863 -0
  187. venv/Lib/site-packages/setuptools/_vendor/packaging/py.typed +0 -0
  188. venv/Lib/site-packages/setuptools/_vendor/packaging/requirements.py +91 -0
  189. venv/Lib/site-packages/setuptools/_vendor/packaging/specifiers.py +1020 -0
  190. venv/Lib/site-packages/setuptools/_vendor/packaging/tags.py +617 -0
  191. venv/Lib/site-packages/setuptools/_vendor/packaging/utils.py +163 -0
  192. venv/Lib/site-packages/setuptools/_vendor/packaging/version.py +582 -0
  193. venv/Lib/site-packages/setuptools/_vendor/platformdirs/__init__.py +627 -0
  194. venv/Lib/site-packages/setuptools/_vendor/platformdirs/__main__.py +55 -0
  195. venv/Lib/site-packages/setuptools/_vendor/platformdirs/android.py +249 -0
  196. venv/Lib/site-packages/setuptools/_vendor/platformdirs/api.py +292 -0
  197. venv/Lib/site-packages/setuptools/_vendor/platformdirs/macos.py +130 -0
  198. venv/Lib/site-packages/setuptools/_vendor/platformdirs/py.typed +0 -0
  199. venv/Lib/site-packages/setuptools/_vendor/platformdirs/unix.py +275 -0
  200. venv/Lib/site-packages/setuptools/_vendor/platformdirs/version.py +16 -0
  201. venv/Lib/site-packages/setuptools/_vendor/platformdirs/windows.py +272 -0
  202. venv/Lib/site-packages/setuptools/_vendor/tomli/__init__.py +11 -0
  203. venv/Lib/site-packages/setuptools/_vendor/tomli/_parser.py +691 -0
  204. venv/Lib/site-packages/setuptools/_vendor/tomli/_re.py +107 -0
  205. venv/Lib/site-packages/setuptools/_vendor/tomli/_types.py +10 -0
  206. venv/Lib/site-packages/setuptools/_vendor/tomli/py.typed +1 -0
  207. venv/Lib/site-packages/setuptools/_vendor/typeguard/__init__.py +48 -0
  208. venv/Lib/site-packages/setuptools/_vendor/typeguard/_checkers.py +993 -0
  209. venv/Lib/site-packages/setuptools/_vendor/typeguard/_config.py +108 -0
  210. venv/Lib/site-packages/setuptools/_vendor/typeguard/_decorators.py +235 -0
  211. venv/Lib/site-packages/setuptools/_vendor/typeguard/_exceptions.py +42 -0
  212. venv/Lib/site-packages/setuptools/_vendor/typeguard/_functions.py +308 -0
  213. venv/Lib/site-packages/setuptools/_vendor/typeguard/_importhook.py +213 -0
  214. venv/Lib/site-packages/setuptools/_vendor/typeguard/_memo.py +48 -0
  215. venv/Lib/site-packages/setuptools/_vendor/typeguard/_pytest_plugin.py +127 -0
  216. venv/Lib/site-packages/setuptools/_vendor/typeguard/_suppression.py +86 -0
  217. venv/Lib/site-packages/setuptools/_vendor/typeguard/_transformer.py +1229 -0
  218. venv/Lib/site-packages/setuptools/_vendor/typeguard/_union_transformer.py +55 -0
  219. venv/Lib/site-packages/setuptools/_vendor/typeguard/_utils.py +173 -0
  220. venv/Lib/site-packages/setuptools/_vendor/typeguard/py.typed +0 -0
  221. venv/Lib/site-packages/setuptools/_vendor/typing_extensions.py +3641 -0
  222. venv/Lib/site-packages/setuptools/_vendor/wheel/__init__.py +3 -0
  223. venv/Lib/site-packages/setuptools/_vendor/wheel/__main__.py +23 -0
  224. venv/Lib/site-packages/setuptools/_vendor/wheel/_bdist_wheel.py +613 -0
  225. venv/Lib/site-packages/setuptools/_vendor/wheel/_setuptools_logging.py +26 -0
  226. venv/Lib/site-packages/setuptools/_vendor/wheel/bdist_wheel.py +26 -0
  227. venv/Lib/site-packages/setuptools/_vendor/wheel/cli/__init__.py +155 -0
  228. venv/Lib/site-packages/setuptools/_vendor/wheel/cli/convert.py +332 -0
  229. venv/Lib/site-packages/setuptools/_vendor/wheel/cli/pack.py +85 -0
  230. venv/Lib/site-packages/setuptools/_vendor/wheel/cli/tags.py +139 -0
  231. venv/Lib/site-packages/setuptools/_vendor/wheel/cli/unpack.py +30 -0
  232. venv/Lib/site-packages/setuptools/_vendor/wheel/macosx_libfile.py +482 -0
  233. venv/Lib/site-packages/setuptools/_vendor/wheel/metadata.py +183 -0
  234. venv/Lib/site-packages/setuptools/_vendor/wheel/util.py +17 -0
  235. venv/Lib/site-packages/setuptools/_vendor/wheel/vendored/__init__.py +0 -0
  236. venv/Lib/site-packages/setuptools/_vendor/wheel/vendored/packaging/__init__.py +0 -0
  237. venv/Lib/site-packages/setuptools/_vendor/wheel/vendored/packaging/_elffile.py +108 -0
  238. venv/Lib/site-packages/setuptools/_vendor/wheel/vendored/packaging/_manylinux.py +260 -0
  239. venv/Lib/site-packages/setuptools/_vendor/wheel/vendored/packaging/_musllinux.py +83 -0
  240. venv/Lib/site-packages/setuptools/_vendor/wheel/vendored/packaging/_parser.py +356 -0
  241. venv/Lib/site-packages/setuptools/_vendor/wheel/vendored/packaging/_structures.py +61 -0
  242. venv/Lib/site-packages/setuptools/_vendor/wheel/vendored/packaging/_tokenizer.py +192 -0
  243. venv/Lib/site-packages/setuptools/_vendor/wheel/vendored/packaging/markers.py +253 -0
  244. venv/Lib/site-packages/setuptools/_vendor/wheel/vendored/packaging/requirements.py +90 -0
  245. venv/Lib/site-packages/setuptools/_vendor/wheel/vendored/packaging/specifiers.py +1011 -0
  246. venv/Lib/site-packages/setuptools/_vendor/wheel/vendored/packaging/tags.py +571 -0
  247. venv/Lib/site-packages/setuptools/_vendor/wheel/vendored/packaging/utils.py +172 -0
  248. venv/Lib/site-packages/setuptools/_vendor/wheel/vendored/packaging/version.py +561 -0
  249. venv/Lib/site-packages/setuptools/_vendor/wheel/wheelfile.py +227 -0
  250. venv/Lib/site-packages/setuptools/_vendor/zipp/__init__.py +501 -0
  251. venv/Lib/site-packages/setuptools/_vendor/zipp/compat/__init__.py +0 -0
  252. venv/Lib/site-packages/setuptools/_vendor/zipp/compat/py310.py +11 -0
  253. venv/Lib/site-packages/setuptools/_vendor/zipp/glob.py +106 -0
  254. venv/Lib/site-packages/setuptools/archive_util.py +219 -0
  255. venv/Lib/site-packages/setuptools/build_meta.py +548 -0
  256. venv/Lib/site-packages/setuptools/command/__init__.py +21 -0
  257. venv/Lib/site-packages/setuptools/command/_requirestxt.py +131 -0
  258. venv/Lib/site-packages/setuptools/command/alias.py +77 -0
  259. venv/Lib/site-packages/setuptools/command/bdist_egg.py +477 -0
  260. venv/Lib/site-packages/setuptools/command/bdist_rpm.py +42 -0
  261. venv/Lib/site-packages/setuptools/command/bdist_wheel.py +604 -0
  262. venv/Lib/site-packages/setuptools/command/build.py +135 -0
  263. venv/Lib/site-packages/setuptools/command/build_clib.py +103 -0
  264. venv/Lib/site-packages/setuptools/command/build_ext.py +470 -0
  265. venv/Lib/site-packages/setuptools/command/build_py.py +400 -0
  266. venv/Lib/site-packages/setuptools/command/develop.py +55 -0
  267. venv/Lib/site-packages/setuptools/command/dist_info.py +103 -0
  268. venv/Lib/site-packages/setuptools/command/easy_install.py +30 -0
  269. venv/Lib/site-packages/setuptools/command/editable_wheel.py +908 -0
  270. venv/Lib/site-packages/setuptools/command/egg_info.py +718 -0
  271. venv/Lib/site-packages/setuptools/command/install.py +131 -0
  272. venv/Lib/site-packages/setuptools/command/install_egg_info.py +58 -0
  273. venv/Lib/site-packages/setuptools/command/install_lib.py +137 -0
  274. venv/Lib/site-packages/setuptools/command/install_scripts.py +67 -0
  275. venv/Lib/site-packages/setuptools/command/rotate.py +65 -0
  276. venv/Lib/site-packages/setuptools/command/saveopts.py +21 -0
  277. venv/Lib/site-packages/setuptools/command/sdist.py +217 -0
  278. venv/Lib/site-packages/setuptools/command/setopt.py +141 -0
  279. venv/Lib/site-packages/setuptools/command/test.py +45 -0
  280. venv/Lib/site-packages/setuptools/compat/__init__.py +0 -0
  281. venv/Lib/site-packages/setuptools/compat/py310.py +20 -0
  282. venv/Lib/site-packages/setuptools/compat/py311.py +27 -0
  283. venv/Lib/site-packages/setuptools/compat/py312.py +13 -0
  284. venv/Lib/site-packages/setuptools/compat/py39.py +9 -0
  285. venv/Lib/site-packages/setuptools/config/__init__.py +43 -0
  286. venv/Lib/site-packages/setuptools/config/_apply_pyprojecttoml.py +526 -0
  287. venv/Lib/site-packages/setuptools/config/_validate_pyproject/__init__.py +34 -0
  288. venv/Lib/site-packages/setuptools/config/_validate_pyproject/error_reporting.py +336 -0
  289. venv/Lib/site-packages/setuptools/config/_validate_pyproject/extra_validations.py +82 -0
  290. venv/Lib/site-packages/setuptools/config/_validate_pyproject/fastjsonschema_exceptions.py +51 -0
  291. venv/Lib/site-packages/setuptools/config/_validate_pyproject/fastjsonschema_validations.py +1412 -0
  292. venv/Lib/site-packages/setuptools/config/_validate_pyproject/formats.py +402 -0
  293. venv/Lib/site-packages/setuptools/config/expand.py +452 -0
  294. venv/Lib/site-packages/setuptools/config/pyprojecttoml.py +468 -0
  295. venv/Lib/site-packages/setuptools/config/setupcfg.py +780 -0
  296. venv/Lib/site-packages/setuptools/depends.py +185 -0
  297. venv/Lib/site-packages/setuptools/discovery.py +614 -0
  298. venv/Lib/site-packages/setuptools/dist.py +1119 -0
  299. venv/Lib/site-packages/setuptools/errors.py +67 -0
  300. venv/Lib/site-packages/setuptools/extension.py +177 -0
  301. venv/Lib/site-packages/setuptools/glob.py +185 -0
  302. venv/Lib/site-packages/setuptools/installer.py +155 -0
  303. venv/Lib/site-packages/setuptools/launch.py +36 -0
  304. venv/Lib/site-packages/setuptools/logging.py +40 -0
  305. venv/Lib/site-packages/setuptools/modified.py +18 -0
  306. venv/Lib/site-packages/setuptools/monkey.py +126 -0
  307. venv/Lib/site-packages/setuptools/msvc.py +1536 -0
  308. venv/Lib/site-packages/setuptools/namespaces.py +106 -0
  309. venv/Lib/site-packages/setuptools/tests/__init__.py +13 -0
  310. venv/Lib/site-packages/setuptools/tests/compat/__init__.py +0 -0
  311. venv/Lib/site-packages/setuptools/tests/compat/py39.py +3 -0
  312. venv/Lib/site-packages/setuptools/tests/config/__init__.py +0 -0
  313. venv/Lib/site-packages/setuptools/tests/config/downloads/__init__.py +59 -0
  314. venv/Lib/site-packages/setuptools/tests/config/downloads/preload.py +18 -0
  315. venv/Lib/site-packages/setuptools/tests/config/test_apply_pyprojecttoml.py +772 -0
  316. venv/Lib/site-packages/setuptools/tests/config/test_expand.py +247 -0
  317. venv/Lib/site-packages/setuptools/tests/config/test_pyprojecttoml.py +396 -0
  318. venv/Lib/site-packages/setuptools/tests/config/test_pyprojecttoml_dynamic_deps.py +109 -0
  319. venv/Lib/site-packages/setuptools/tests/config/test_setupcfg.py +980 -0
  320. venv/Lib/site-packages/setuptools/tests/contexts.py +131 -0
  321. venv/Lib/site-packages/setuptools/tests/environment.py +95 -0
  322. venv/Lib/site-packages/setuptools/tests/fixtures.py +392 -0
  323. venv/Lib/site-packages/setuptools/tests/integration/__init__.py +0 -0
  324. venv/Lib/site-packages/setuptools/tests/integration/helpers.py +77 -0
  325. venv/Lib/site-packages/setuptools/tests/integration/test_pbr.py +20 -0
  326. venv/Lib/site-packages/setuptools/tests/integration/test_pip_install_sdist.py +223 -0
  327. venv/Lib/site-packages/setuptools/tests/mod_with_constant.py +1 -0
  328. venv/Lib/site-packages/setuptools/tests/namespaces.py +90 -0
  329. venv/Lib/site-packages/setuptools/tests/script-with-bom.py +1 -0
  330. venv/Lib/site-packages/setuptools/tests/test_archive_util.py +36 -0
  331. venv/Lib/site-packages/setuptools/tests/test_bdist_deprecations.py +28 -0
  332. venv/Lib/site-packages/setuptools/tests/test_bdist_egg.py +73 -0
  333. venv/Lib/site-packages/setuptools/tests/test_bdist_wheel.py +708 -0
  334. venv/Lib/site-packages/setuptools/tests/test_build.py +33 -0
  335. venv/Lib/site-packages/setuptools/tests/test_build_clib.py +84 -0
  336. venv/Lib/site-packages/setuptools/tests/test_build_ext.py +293 -0
  337. venv/Lib/site-packages/setuptools/tests/test_build_meta.py +959 -0
  338. venv/Lib/site-packages/setuptools/tests/test_build_py.py +480 -0
  339. venv/Lib/site-packages/setuptools/tests/test_config_discovery.py +647 -0
  340. venv/Lib/site-packages/setuptools/tests/test_core_metadata.py +622 -0
  341. venv/Lib/site-packages/setuptools/tests/test_depends.py +15 -0
  342. venv/Lib/site-packages/setuptools/tests/test_develop.py +112 -0
  343. venv/Lib/site-packages/setuptools/tests/test_dist.py +278 -0
  344. venv/Lib/site-packages/setuptools/tests/test_dist_info.py +147 -0
  345. venv/Lib/site-packages/setuptools/tests/test_distutils_adoption.py +198 -0
  346. venv/Lib/site-packages/setuptools/tests/test_editable_install.py +1263 -0
  347. venv/Lib/site-packages/setuptools/tests/test_egg_info.py +1306 -0
  348. venv/Lib/site-packages/setuptools/tests/test_extern.py +15 -0
  349. venv/Lib/site-packages/setuptools/tests/test_find_packages.py +218 -0
  350. venv/Lib/site-packages/setuptools/tests/test_find_py_modules.py +73 -0
  351. venv/Lib/site-packages/setuptools/tests/test_glob.py +45 -0
  352. venv/Lib/site-packages/setuptools/tests/test_install_scripts.py +89 -0
  353. venv/Lib/site-packages/setuptools/tests/test_logging.py +76 -0
  354. venv/Lib/site-packages/setuptools/tests/test_manifest.py +622 -0
  355. venv/Lib/site-packages/setuptools/tests/test_namespaces.py +138 -0
  356. venv/Lib/site-packages/setuptools/tests/test_scripts.py +12 -0
  357. venv/Lib/site-packages/setuptools/tests/test_sdist.py +984 -0
  358. venv/Lib/site-packages/setuptools/tests/test_setopt.py +40 -0
  359. venv/Lib/site-packages/setuptools/tests/test_setuptools.py +290 -0
  360. venv/Lib/site-packages/setuptools/tests/test_shutil_wrapper.py +23 -0
  361. venv/Lib/site-packages/setuptools/tests/test_unicode_utils.py +10 -0
  362. venv/Lib/site-packages/setuptools/tests/test_virtualenv.py +113 -0
  363. venv/Lib/site-packages/setuptools/tests/test_warnings.py +106 -0
  364. venv/Lib/site-packages/setuptools/tests/test_wheel.py +690 -0
  365. venv/Lib/site-packages/setuptools/tests/test_windows_wrappers.py +258 -0
  366. venv/Lib/site-packages/setuptools/tests/text.py +4 -0
  367. venv/Lib/site-packages/setuptools/tests/textwrap.py +6 -0
  368. venv/Lib/site-packages/setuptools/unicode_utils.py +102 -0
  369. venv/Lib/site-packages/setuptools/version.py +6 -0
  370. venv/Lib/site-packages/setuptools/warnings.py +110 -0
  371. venv/Lib/site-packages/setuptools/wheel.py +261 -0
  372. venv/Lib/site-packages/setuptools/windows_support.py +30 -0
  373. {android_notify-1.60.0.dist-info → android_notify-1.60.2.dist-info}/WHEEL +0 -0
  374. {android_notify-1.60.0.dist-info → android_notify-1.60.2.dist-info}/entry_points.txt +0 -0
  375. {android_notify-1.60.0.dist-info → android_notify-1.60.2.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,3986 @@
1
+ """
2
+ inflect: english language inflection
3
+ - correctly generate plurals, ordinals, indefinite articles
4
+ - convert numbers to words
5
+
6
+ Copyright (C) 2010 Paul Dyson
7
+
8
+ Based upon the Perl module
9
+ `Lingua::EN::Inflect <https://metacpan.org/pod/Lingua::EN::Inflect>`_.
10
+
11
+ methods:
12
+ classical inflect
13
+ plural plural_noun plural_verb plural_adj singular_noun no num a an
14
+ compare compare_nouns compare_verbs compare_adjs
15
+ present_participle
16
+ ordinal
17
+ number_to_words
18
+ join
19
+ defnoun defverb defadj defa defan
20
+
21
+ INFLECTIONS:
22
+ classical inflect
23
+ plural plural_noun plural_verb plural_adj singular_noun compare
24
+ no num a an present_participle
25
+
26
+ PLURALS:
27
+ classical inflect
28
+ plural plural_noun plural_verb plural_adj singular_noun no num
29
+ compare compare_nouns compare_verbs compare_adjs
30
+
31
+ COMPARISONS:
32
+ classical
33
+ compare compare_nouns compare_verbs compare_adjs
34
+
35
+ ARTICLES:
36
+ classical inflect num a an
37
+
38
+ NUMERICAL:
39
+ ordinal number_to_words
40
+
41
+ USER_DEFINED:
42
+ defnoun defverb defadj defa defan
43
+
44
+ Exceptions:
45
+ UnknownClassicalModeError
46
+ BadNumValueError
47
+ BadChunkingOptionError
48
+ NumOutOfRangeError
49
+ BadUserDefinedPatternError
50
+ BadRcFileError
51
+ BadGenderError
52
+
53
+ """
54
+
55
+ from __future__ import annotations
56
+
57
+ import ast
58
+ import collections
59
+ import contextlib
60
+ import functools
61
+ import itertools
62
+ import re
63
+ from numbers import Number
64
+ from typing import (
65
+ TYPE_CHECKING,
66
+ Any,
67
+ Callable,
68
+ Dict,
69
+ Iterable,
70
+ List,
71
+ Literal,
72
+ Match,
73
+ Optional,
74
+ Sequence,
75
+ Tuple,
76
+ Union,
77
+ cast,
78
+ )
79
+
80
+ from more_itertools import windowed_complete
81
+ from typeguard import typechecked
82
+
83
+ from .compat.py38 import Annotated
84
+
85
+
86
+ class UnknownClassicalModeError(Exception):
87
+ pass
88
+
89
+
90
+ class BadNumValueError(Exception):
91
+ pass
92
+
93
+
94
+ class BadChunkingOptionError(Exception):
95
+ pass
96
+
97
+
98
+ class NumOutOfRangeError(Exception):
99
+ pass
100
+
101
+
102
+ class BadUserDefinedPatternError(Exception):
103
+ pass
104
+
105
+
106
+ class BadRcFileError(Exception):
107
+ pass
108
+
109
+
110
+ class BadGenderError(Exception):
111
+ pass
112
+
113
+
114
+ def enclose(s: str) -> str:
115
+ return f"(?:{s})"
116
+
117
+
118
+ def joinstem(cutpoint: Optional[int] = 0, words: Optional[Iterable[str]] = None) -> str:
119
+ """
120
+ Join stem of each word in words into a string for regex.
121
+
122
+ Each word is truncated at cutpoint.
123
+
124
+ Cutpoint is usually negative indicating the number of letters to remove
125
+ from the end of each word.
126
+
127
+ >>> joinstem(-2, ["ephemeris", "iris", ".*itis"])
128
+ '(?:ephemer|ir|.*it)'
129
+
130
+ >>> joinstem(None, ["ephemeris"])
131
+ '(?:ephemeris)'
132
+
133
+ >>> joinstem(5, None)
134
+ '(?:)'
135
+ """
136
+ return enclose("|".join(w[:cutpoint] for w in words or []))
137
+
138
+
139
+ def bysize(words: Iterable[str]) -> Dict[int, set]:
140
+ """
141
+ From a list of words, return a dict of sets sorted by word length.
142
+
143
+ >>> words = ['ant', 'cat', 'dog', 'pig', 'frog', 'goat', 'horse', 'elephant']
144
+ >>> ret = bysize(words)
145
+ >>> sorted(ret[3])
146
+ ['ant', 'cat', 'dog', 'pig']
147
+ >>> ret[5]
148
+ {'horse'}
149
+ """
150
+ res: Dict[int, set] = collections.defaultdict(set)
151
+ for w in words:
152
+ res[len(w)].add(w)
153
+ return res
154
+
155
+
156
+ def make_pl_si_lists(
157
+ lst: Iterable[str],
158
+ plending: str,
159
+ siendingsize: Optional[int],
160
+ dojoinstem: bool = True,
161
+ ):
162
+ """
163
+ given a list of singular words: lst
164
+
165
+ an ending to append to make the plural: plending
166
+
167
+ the number of characters to remove from the singular
168
+ before appending plending: siendingsize
169
+
170
+ a flag whether to create a joinstem: dojoinstem
171
+
172
+ return:
173
+ a list of pluralised words: si_list (called si because this is what you need to
174
+ look for to make the singular)
175
+
176
+ the pluralised words as a dict of sets sorted by word length: si_bysize
177
+ the singular words as a dict of sets sorted by word length: pl_bysize
178
+ if dojoinstem is True: a regular expression that matches any of the stems: stem
179
+ """
180
+ if siendingsize is not None:
181
+ siendingsize = -siendingsize
182
+ si_list = [w[:siendingsize] + plending for w in lst]
183
+ pl_bysize = bysize(lst)
184
+ si_bysize = bysize(si_list)
185
+ if dojoinstem:
186
+ stem = joinstem(siendingsize, lst)
187
+ return si_list, si_bysize, pl_bysize, stem
188
+ else:
189
+ return si_list, si_bysize, pl_bysize
190
+
191
+
192
+ # 1. PLURALS
193
+
194
+ pl_sb_irregular_s = {
195
+ "corpus": "corpuses|corpora",
196
+ "opus": "opuses|opera",
197
+ "genus": "genera",
198
+ "mythos": "mythoi",
199
+ "penis": "penises|penes",
200
+ "testis": "testes",
201
+ "atlas": "atlases|atlantes",
202
+ "yes": "yeses",
203
+ }
204
+
205
+ pl_sb_irregular = {
206
+ "child": "children",
207
+ "chili": "chilis|chilies",
208
+ "brother": "brothers|brethren",
209
+ "infinity": "infinities|infinity",
210
+ "loaf": "loaves",
211
+ "lore": "lores|lore",
212
+ "hoof": "hoofs|hooves",
213
+ "beef": "beefs|beeves",
214
+ "thief": "thiefs|thieves",
215
+ "money": "monies",
216
+ "mongoose": "mongooses",
217
+ "ox": "oxen",
218
+ "cow": "cows|kine",
219
+ "graffito": "graffiti",
220
+ "octopus": "octopuses|octopodes",
221
+ "genie": "genies|genii",
222
+ "ganglion": "ganglions|ganglia",
223
+ "trilby": "trilbys",
224
+ "turf": "turfs|turves",
225
+ "numen": "numina",
226
+ "atman": "atmas",
227
+ "occiput": "occiputs|occipita",
228
+ "sabretooth": "sabretooths",
229
+ "sabertooth": "sabertooths",
230
+ "lowlife": "lowlifes",
231
+ "flatfoot": "flatfoots",
232
+ "tenderfoot": "tenderfoots",
233
+ "romany": "romanies",
234
+ "jerry": "jerries",
235
+ "mary": "maries",
236
+ "talouse": "talouses",
237
+ "rom": "roma",
238
+ "carmen": "carmina",
239
+ }
240
+
241
+ pl_sb_irregular.update(pl_sb_irregular_s)
242
+ # pl_sb_irregular_keys = enclose('|'.join(pl_sb_irregular.keys()))
243
+
244
+ pl_sb_irregular_caps = {
245
+ "Romany": "Romanies",
246
+ "Jerry": "Jerrys",
247
+ "Mary": "Marys",
248
+ "Rom": "Roma",
249
+ }
250
+
251
+ pl_sb_irregular_compound = {"prima donna": "prima donnas|prime donne"}
252
+
253
+ si_sb_irregular = {v: k for (k, v) in pl_sb_irregular.items()}
254
+ for k in list(si_sb_irregular):
255
+ if "|" in k:
256
+ k1, k2 = k.split("|")
257
+ si_sb_irregular[k1] = si_sb_irregular[k2] = si_sb_irregular[k]
258
+ del si_sb_irregular[k]
259
+ si_sb_irregular_caps = {v: k for (k, v) in pl_sb_irregular_caps.items()}
260
+ si_sb_irregular_compound = {v: k for (k, v) in pl_sb_irregular_compound.items()}
261
+ for k in list(si_sb_irregular_compound):
262
+ if "|" in k:
263
+ k1, k2 = k.split("|")
264
+ si_sb_irregular_compound[k1] = si_sb_irregular_compound[k2] = (
265
+ si_sb_irregular_compound[k]
266
+ )
267
+ del si_sb_irregular_compound[k]
268
+
269
+ # si_sb_irregular_keys = enclose('|'.join(si_sb_irregular.keys()))
270
+
271
+ # Z's that don't double
272
+
273
+ pl_sb_z_zes_list = ("quartz", "topaz")
274
+ pl_sb_z_zes_bysize = bysize(pl_sb_z_zes_list)
275
+
276
+ pl_sb_ze_zes_list = ("snooze",)
277
+ pl_sb_ze_zes_bysize = bysize(pl_sb_ze_zes_list)
278
+
279
+
280
+ # CLASSICAL "..is" -> "..ides"
281
+
282
+ pl_sb_C_is_ides_complete = [
283
+ # GENERAL WORDS...
284
+ "ephemeris",
285
+ "iris",
286
+ "clitoris",
287
+ "chrysalis",
288
+ "epididymis",
289
+ ]
290
+
291
+ pl_sb_C_is_ides_endings = [
292
+ # INFLAMATIONS...
293
+ "itis"
294
+ ]
295
+
296
+ pl_sb_C_is_ides = joinstem(
297
+ -2, pl_sb_C_is_ides_complete + [f".*{w}" for w in pl_sb_C_is_ides_endings]
298
+ )
299
+
300
+ pl_sb_C_is_ides_list = pl_sb_C_is_ides_complete + pl_sb_C_is_ides_endings
301
+
302
+ (
303
+ si_sb_C_is_ides_list,
304
+ si_sb_C_is_ides_bysize,
305
+ pl_sb_C_is_ides_bysize,
306
+ ) = make_pl_si_lists(pl_sb_C_is_ides_list, "ides", 2, dojoinstem=False)
307
+
308
+
309
+ # CLASSICAL "..a" -> "..ata"
310
+
311
+ pl_sb_C_a_ata_list = (
312
+ "anathema",
313
+ "bema",
314
+ "carcinoma",
315
+ "charisma",
316
+ "diploma",
317
+ "dogma",
318
+ "drama",
319
+ "edema",
320
+ "enema",
321
+ "enigma",
322
+ "lemma",
323
+ "lymphoma",
324
+ "magma",
325
+ "melisma",
326
+ "miasma",
327
+ "oedema",
328
+ "sarcoma",
329
+ "schema",
330
+ "soma",
331
+ "stigma",
332
+ "stoma",
333
+ "trauma",
334
+ "gumma",
335
+ "pragma",
336
+ )
337
+
338
+ (
339
+ si_sb_C_a_ata_list,
340
+ si_sb_C_a_ata_bysize,
341
+ pl_sb_C_a_ata_bysize,
342
+ pl_sb_C_a_ata,
343
+ ) = make_pl_si_lists(pl_sb_C_a_ata_list, "ata", 1)
344
+
345
+ # UNCONDITIONAL "..a" -> "..ae"
346
+
347
+ pl_sb_U_a_ae_list = (
348
+ "alumna",
349
+ "alga",
350
+ "vertebra",
351
+ "persona",
352
+ "vita",
353
+ )
354
+ (
355
+ si_sb_U_a_ae_list,
356
+ si_sb_U_a_ae_bysize,
357
+ pl_sb_U_a_ae_bysize,
358
+ pl_sb_U_a_ae,
359
+ ) = make_pl_si_lists(pl_sb_U_a_ae_list, "e", None)
360
+
361
+ # CLASSICAL "..a" -> "..ae"
362
+
363
+ pl_sb_C_a_ae_list = (
364
+ "amoeba",
365
+ "antenna",
366
+ "formula",
367
+ "hyperbola",
368
+ "medusa",
369
+ "nebula",
370
+ "parabola",
371
+ "abscissa",
372
+ "hydra",
373
+ "nova",
374
+ "lacuna",
375
+ "aurora",
376
+ "umbra",
377
+ "flora",
378
+ "fauna",
379
+ )
380
+ (
381
+ si_sb_C_a_ae_list,
382
+ si_sb_C_a_ae_bysize,
383
+ pl_sb_C_a_ae_bysize,
384
+ pl_sb_C_a_ae,
385
+ ) = make_pl_si_lists(pl_sb_C_a_ae_list, "e", None)
386
+
387
+
388
+ # CLASSICAL "..en" -> "..ina"
389
+
390
+ pl_sb_C_en_ina_list = ("stamen", "foramen", "lumen")
391
+
392
+ (
393
+ si_sb_C_en_ina_list,
394
+ si_sb_C_en_ina_bysize,
395
+ pl_sb_C_en_ina_bysize,
396
+ pl_sb_C_en_ina,
397
+ ) = make_pl_si_lists(pl_sb_C_en_ina_list, "ina", 2)
398
+
399
+
400
+ # UNCONDITIONAL "..um" -> "..a"
401
+
402
+ pl_sb_U_um_a_list = (
403
+ "bacterium",
404
+ "agendum",
405
+ "desideratum",
406
+ "erratum",
407
+ "stratum",
408
+ "datum",
409
+ "ovum",
410
+ "extremum",
411
+ "candelabrum",
412
+ )
413
+ (
414
+ si_sb_U_um_a_list,
415
+ si_sb_U_um_a_bysize,
416
+ pl_sb_U_um_a_bysize,
417
+ pl_sb_U_um_a,
418
+ ) = make_pl_si_lists(pl_sb_U_um_a_list, "a", 2)
419
+
420
+ # CLASSICAL "..um" -> "..a"
421
+
422
+ pl_sb_C_um_a_list = (
423
+ "maximum",
424
+ "minimum",
425
+ "momentum",
426
+ "optimum",
427
+ "quantum",
428
+ "cranium",
429
+ "curriculum",
430
+ "dictum",
431
+ "phylum",
432
+ "aquarium",
433
+ "compendium",
434
+ "emporium",
435
+ "encomium",
436
+ "gymnasium",
437
+ "honorarium",
438
+ "interregnum",
439
+ "lustrum",
440
+ "memorandum",
441
+ "millennium",
442
+ "rostrum",
443
+ "spectrum",
444
+ "speculum",
445
+ "stadium",
446
+ "trapezium",
447
+ "ultimatum",
448
+ "medium",
449
+ "vacuum",
450
+ "velum",
451
+ "consortium",
452
+ "arboretum",
453
+ )
454
+
455
+ (
456
+ si_sb_C_um_a_list,
457
+ si_sb_C_um_a_bysize,
458
+ pl_sb_C_um_a_bysize,
459
+ pl_sb_C_um_a,
460
+ ) = make_pl_si_lists(pl_sb_C_um_a_list, "a", 2)
461
+
462
+
463
+ # UNCONDITIONAL "..us" -> "i"
464
+
465
+ pl_sb_U_us_i_list = (
466
+ "alumnus",
467
+ "alveolus",
468
+ "bacillus",
469
+ "bronchus",
470
+ "locus",
471
+ "nucleus",
472
+ "stimulus",
473
+ "meniscus",
474
+ "sarcophagus",
475
+ )
476
+ (
477
+ si_sb_U_us_i_list,
478
+ si_sb_U_us_i_bysize,
479
+ pl_sb_U_us_i_bysize,
480
+ pl_sb_U_us_i,
481
+ ) = make_pl_si_lists(pl_sb_U_us_i_list, "i", 2)
482
+
483
+ # CLASSICAL "..us" -> "..i"
484
+
485
+ pl_sb_C_us_i_list = (
486
+ "focus",
487
+ "radius",
488
+ "genius",
489
+ "incubus",
490
+ "succubus",
491
+ "nimbus",
492
+ "fungus",
493
+ "nucleolus",
494
+ "stylus",
495
+ "torus",
496
+ "umbilicus",
497
+ "uterus",
498
+ "hippopotamus",
499
+ "cactus",
500
+ )
501
+
502
+ (
503
+ si_sb_C_us_i_list,
504
+ si_sb_C_us_i_bysize,
505
+ pl_sb_C_us_i_bysize,
506
+ pl_sb_C_us_i,
507
+ ) = make_pl_si_lists(pl_sb_C_us_i_list, "i", 2)
508
+
509
+
510
+ # CLASSICAL "..us" -> "..us" (ASSIMILATED 4TH DECLENSION LATIN NOUNS)
511
+
512
+ pl_sb_C_us_us = (
513
+ "status",
514
+ "apparatus",
515
+ "prospectus",
516
+ "sinus",
517
+ "hiatus",
518
+ "impetus",
519
+ "plexus",
520
+ )
521
+ pl_sb_C_us_us_bysize = bysize(pl_sb_C_us_us)
522
+
523
+ # UNCONDITIONAL "..on" -> "a"
524
+
525
+ pl_sb_U_on_a_list = (
526
+ "criterion",
527
+ "perihelion",
528
+ "aphelion",
529
+ "phenomenon",
530
+ "prolegomenon",
531
+ "noumenon",
532
+ "organon",
533
+ "asyndeton",
534
+ "hyperbaton",
535
+ )
536
+ (
537
+ si_sb_U_on_a_list,
538
+ si_sb_U_on_a_bysize,
539
+ pl_sb_U_on_a_bysize,
540
+ pl_sb_U_on_a,
541
+ ) = make_pl_si_lists(pl_sb_U_on_a_list, "a", 2)
542
+
543
+ # CLASSICAL "..on" -> "..a"
544
+
545
+ pl_sb_C_on_a_list = ("oxymoron",)
546
+
547
+ (
548
+ si_sb_C_on_a_list,
549
+ si_sb_C_on_a_bysize,
550
+ pl_sb_C_on_a_bysize,
551
+ pl_sb_C_on_a,
552
+ ) = make_pl_si_lists(pl_sb_C_on_a_list, "a", 2)
553
+
554
+
555
+ # CLASSICAL "..o" -> "..i" (BUT NORMALLY -> "..os")
556
+
557
+ pl_sb_C_o_i = [
558
+ "solo",
559
+ "soprano",
560
+ "basso",
561
+ "alto",
562
+ "contralto",
563
+ "tempo",
564
+ "piano",
565
+ "virtuoso",
566
+ ] # list not tuple so can concat for pl_sb_U_o_os
567
+
568
+ pl_sb_C_o_i_bysize = bysize(pl_sb_C_o_i)
569
+ si_sb_C_o_i_bysize = bysize([f"{w[:-1]}i" for w in pl_sb_C_o_i])
570
+
571
+ pl_sb_C_o_i_stems = joinstem(-1, pl_sb_C_o_i)
572
+
573
+ # ALWAYS "..o" -> "..os"
574
+
575
+ pl_sb_U_o_os_complete = {"ado", "ISO", "NATO", "NCO", "NGO", "oto"}
576
+ si_sb_U_o_os_complete = {f"{w}s" for w in pl_sb_U_o_os_complete}
577
+
578
+
579
+ pl_sb_U_o_os_endings = [
580
+ "aficionado",
581
+ "aggro",
582
+ "albino",
583
+ "allegro",
584
+ "ammo",
585
+ "Antananarivo",
586
+ "archipelago",
587
+ "armadillo",
588
+ "auto",
589
+ "avocado",
590
+ "Bamako",
591
+ "Barquisimeto",
592
+ "bimbo",
593
+ "bingo",
594
+ "Biro",
595
+ "bolero",
596
+ "Bolzano",
597
+ "bongo",
598
+ "Boto",
599
+ "burro",
600
+ "Cairo",
601
+ "canto",
602
+ "cappuccino",
603
+ "casino",
604
+ "cello",
605
+ "Chicago",
606
+ "Chimango",
607
+ "cilantro",
608
+ "cochito",
609
+ "coco",
610
+ "Colombo",
611
+ "Colorado",
612
+ "commando",
613
+ "concertino",
614
+ "contango",
615
+ "credo",
616
+ "crescendo",
617
+ "cyano",
618
+ "demo",
619
+ "ditto",
620
+ "Draco",
621
+ "dynamo",
622
+ "embryo",
623
+ "Esperanto",
624
+ "espresso",
625
+ "euro",
626
+ "falsetto",
627
+ "Faro",
628
+ "fiasco",
629
+ "Filipino",
630
+ "flamenco",
631
+ "furioso",
632
+ "generalissimo",
633
+ "Gestapo",
634
+ "ghetto",
635
+ "gigolo",
636
+ "gizmo",
637
+ "Greensboro",
638
+ "gringo",
639
+ "Guaiabero",
640
+ "guano",
641
+ "gumbo",
642
+ "gyro",
643
+ "hairdo",
644
+ "hippo",
645
+ "Idaho",
646
+ "impetigo",
647
+ "inferno",
648
+ "info",
649
+ "intermezzo",
650
+ "intertrigo",
651
+ "Iquico",
652
+ "jumbo",
653
+ "junto",
654
+ "Kakapo",
655
+ "kilo",
656
+ "Kinkimavo",
657
+ "Kokako",
658
+ "Kosovo",
659
+ "Lesotho",
660
+ "libero",
661
+ "libido",
662
+ "libretto",
663
+ "lido",
664
+ "Lilo",
665
+ "limbo",
666
+ "limo",
667
+ "lineno",
668
+ "lingo",
669
+ "lino",
670
+ "livedo",
671
+ "loco",
672
+ "logo",
673
+ "lumbago",
674
+ "macho",
675
+ "macro",
676
+ "mafioso",
677
+ "magneto",
678
+ "magnifico",
679
+ "Majuro",
680
+ "Malabo",
681
+ "manifesto",
682
+ "Maputo",
683
+ "Maracaibo",
684
+ "medico",
685
+ "memo",
686
+ "metro",
687
+ "Mexico",
688
+ "micro",
689
+ "Milano",
690
+ "Monaco",
691
+ "mono",
692
+ "Montenegro",
693
+ "Morocco",
694
+ "Muqdisho",
695
+ "myo",
696
+ "neutrino",
697
+ "Ningbo",
698
+ "octavo",
699
+ "oregano",
700
+ "Orinoco",
701
+ "Orlando",
702
+ "Oslo",
703
+ "panto",
704
+ "Paramaribo",
705
+ "Pardusco",
706
+ "pedalo",
707
+ "photo",
708
+ "pimento",
709
+ "pinto",
710
+ "pleco",
711
+ "Pluto",
712
+ "pogo",
713
+ "polo",
714
+ "poncho",
715
+ "Porto-Novo",
716
+ "Porto",
717
+ "pro",
718
+ "psycho",
719
+ "pueblo",
720
+ "quarto",
721
+ "Quito",
722
+ "repo",
723
+ "rhino",
724
+ "risotto",
725
+ "rococo",
726
+ "rondo",
727
+ "Sacramento",
728
+ "saddo",
729
+ "sago",
730
+ "salvo",
731
+ "Santiago",
732
+ "Sapporo",
733
+ "Sarajevo",
734
+ "scherzando",
735
+ "scherzo",
736
+ "silo",
737
+ "sirocco",
738
+ "sombrero",
739
+ "staccato",
740
+ "sterno",
741
+ "stucco",
742
+ "stylo",
743
+ "sumo",
744
+ "Taiko",
745
+ "techno",
746
+ "terrazzo",
747
+ "testudo",
748
+ "timpano",
749
+ "tiro",
750
+ "tobacco",
751
+ "Togo",
752
+ "Tokyo",
753
+ "torero",
754
+ "Torino",
755
+ "Toronto",
756
+ "torso",
757
+ "tremolo",
758
+ "typo",
759
+ "tyro",
760
+ "ufo",
761
+ "UNESCO",
762
+ "vaquero",
763
+ "vermicello",
764
+ "verso",
765
+ "vibrato",
766
+ "violoncello",
767
+ "Virgo",
768
+ "weirdo",
769
+ "WHO",
770
+ "WTO",
771
+ "Yamoussoukro",
772
+ "yo-yo",
773
+ "zero",
774
+ "Zibo",
775
+ ] + pl_sb_C_o_i
776
+
777
+ pl_sb_U_o_os_bysize = bysize(pl_sb_U_o_os_endings)
778
+ si_sb_U_o_os_bysize = bysize([f"{w}s" for w in pl_sb_U_o_os_endings])
779
+
780
+
781
+ # UNCONDITIONAL "..ch" -> "..chs"
782
+
783
+ pl_sb_U_ch_chs_list = ("czech", "eunuch", "stomach")
784
+
785
+ (
786
+ si_sb_U_ch_chs_list,
787
+ si_sb_U_ch_chs_bysize,
788
+ pl_sb_U_ch_chs_bysize,
789
+ pl_sb_U_ch_chs,
790
+ ) = make_pl_si_lists(pl_sb_U_ch_chs_list, "s", None)
791
+
792
+
793
+ # UNCONDITIONAL "..[ei]x" -> "..ices"
794
+
795
+ pl_sb_U_ex_ices_list = ("codex", "murex", "silex")
796
+ (
797
+ si_sb_U_ex_ices_list,
798
+ si_sb_U_ex_ices_bysize,
799
+ pl_sb_U_ex_ices_bysize,
800
+ pl_sb_U_ex_ices,
801
+ ) = make_pl_si_lists(pl_sb_U_ex_ices_list, "ices", 2)
802
+
803
+ pl_sb_U_ix_ices_list = ("radix", "helix")
804
+ (
805
+ si_sb_U_ix_ices_list,
806
+ si_sb_U_ix_ices_bysize,
807
+ pl_sb_U_ix_ices_bysize,
808
+ pl_sb_U_ix_ices,
809
+ ) = make_pl_si_lists(pl_sb_U_ix_ices_list, "ices", 2)
810
+
811
+ # CLASSICAL "..[ei]x" -> "..ices"
812
+
813
+ pl_sb_C_ex_ices_list = (
814
+ "vortex",
815
+ "vertex",
816
+ "cortex",
817
+ "latex",
818
+ "pontifex",
819
+ "apex",
820
+ "index",
821
+ "simplex",
822
+ )
823
+
824
+ (
825
+ si_sb_C_ex_ices_list,
826
+ si_sb_C_ex_ices_bysize,
827
+ pl_sb_C_ex_ices_bysize,
828
+ pl_sb_C_ex_ices,
829
+ ) = make_pl_si_lists(pl_sb_C_ex_ices_list, "ices", 2)
830
+
831
+
832
+ pl_sb_C_ix_ices_list = ("appendix",)
833
+
834
+ (
835
+ si_sb_C_ix_ices_list,
836
+ si_sb_C_ix_ices_bysize,
837
+ pl_sb_C_ix_ices_bysize,
838
+ pl_sb_C_ix_ices,
839
+ ) = make_pl_si_lists(pl_sb_C_ix_ices_list, "ices", 2)
840
+
841
+
842
+ # ARABIC: ".." -> "..i"
843
+
844
+ pl_sb_C_i_list = ("afrit", "afreet", "efreet")
845
+
846
+ (si_sb_C_i_list, si_sb_C_i_bysize, pl_sb_C_i_bysize, pl_sb_C_i) = make_pl_si_lists(
847
+ pl_sb_C_i_list, "i", None
848
+ )
849
+
850
+
851
+ # HEBREW: ".." -> "..im"
852
+
853
+ pl_sb_C_im_list = ("goy", "seraph", "cherub")
854
+
855
+ (si_sb_C_im_list, si_sb_C_im_bysize, pl_sb_C_im_bysize, pl_sb_C_im) = make_pl_si_lists(
856
+ pl_sb_C_im_list, "im", None
857
+ )
858
+
859
+
860
+ # UNCONDITIONAL "..man" -> "..mans"
861
+
862
+ pl_sb_U_man_mans_list = """
863
+ ataman caiman cayman ceriman
864
+ desman dolman farman harman hetman
865
+ human leman ottoman shaman talisman
866
+ """.split()
867
+ pl_sb_U_man_mans_caps_list = """
868
+ Alabaman Bahaman Burman German
869
+ Hiroshiman Liman Nakayaman Norman Oklahoman
870
+ Panaman Roman Selman Sonaman Tacoman Yakiman
871
+ Yokohaman Yuman
872
+ """.split()
873
+
874
+ (
875
+ si_sb_U_man_mans_list,
876
+ si_sb_U_man_mans_bysize,
877
+ pl_sb_U_man_mans_bysize,
878
+ ) = make_pl_si_lists(pl_sb_U_man_mans_list, "s", None, dojoinstem=False)
879
+ (
880
+ si_sb_U_man_mans_caps_list,
881
+ si_sb_U_man_mans_caps_bysize,
882
+ pl_sb_U_man_mans_caps_bysize,
883
+ ) = make_pl_si_lists(pl_sb_U_man_mans_caps_list, "s", None, dojoinstem=False)
884
+
885
+ # UNCONDITIONAL "..louse" -> "..lice"
886
+ pl_sb_U_louse_lice_list = ("booklouse", "grapelouse", "louse", "woodlouse")
887
+
888
+ (
889
+ si_sb_U_louse_lice_list,
890
+ si_sb_U_louse_lice_bysize,
891
+ pl_sb_U_louse_lice_bysize,
892
+ ) = make_pl_si_lists(pl_sb_U_louse_lice_list, "lice", 5, dojoinstem=False)
893
+
894
+ pl_sb_uninflected_s_complete = [
895
+ # PAIRS OR GROUPS SUBSUMED TO A SINGULAR...
896
+ "breeches",
897
+ "britches",
898
+ "pajamas",
899
+ "pyjamas",
900
+ "clippers",
901
+ "gallows",
902
+ "hijinks",
903
+ "headquarters",
904
+ "pliers",
905
+ "scissors",
906
+ "testes",
907
+ "herpes",
908
+ "pincers",
909
+ "shears",
910
+ "proceedings",
911
+ "trousers",
912
+ # UNASSIMILATED LATIN 4th DECLENSION
913
+ "cantus",
914
+ "coitus",
915
+ "nexus",
916
+ # RECENT IMPORTS...
917
+ "contretemps",
918
+ "corps",
919
+ "debris",
920
+ "siemens",
921
+ # DISEASES
922
+ "mumps",
923
+ # MISCELLANEOUS OTHERS...
924
+ "diabetes",
925
+ "jackanapes",
926
+ "series",
927
+ "species",
928
+ "subspecies",
929
+ "rabies",
930
+ "chassis",
931
+ "innings",
932
+ "news",
933
+ "mews",
934
+ "haggis",
935
+ ]
936
+
937
+ pl_sb_uninflected_s_endings = [
938
+ # RECENT IMPORTS...
939
+ "ois",
940
+ # DISEASES
941
+ "measles",
942
+ ]
943
+
944
+ pl_sb_uninflected_s = pl_sb_uninflected_s_complete + [
945
+ f".*{w}" for w in pl_sb_uninflected_s_endings
946
+ ]
947
+
948
+ pl_sb_uninflected_herd = (
949
+ # DON'T INFLECT IN CLASSICAL MODE, OTHERWISE NORMAL INFLECTION
950
+ "wildebeest",
951
+ "swine",
952
+ "eland",
953
+ "bison",
954
+ "buffalo",
955
+ "cattle",
956
+ "elk",
957
+ "rhinoceros",
958
+ "zucchini",
959
+ "caribou",
960
+ "dace",
961
+ "grouse",
962
+ "guinea fowl",
963
+ "guinea-fowl",
964
+ "haddock",
965
+ "hake",
966
+ "halibut",
967
+ "herring",
968
+ "mackerel",
969
+ "pickerel",
970
+ "pike",
971
+ "roe",
972
+ "seed",
973
+ "shad",
974
+ "snipe",
975
+ "teal",
976
+ "turbot",
977
+ "water fowl",
978
+ "water-fowl",
979
+ )
980
+
981
+ pl_sb_uninflected_complete = [
982
+ # SOME FISH AND HERD ANIMALS
983
+ "tuna",
984
+ "salmon",
985
+ "mackerel",
986
+ "trout",
987
+ "bream",
988
+ "sea-bass",
989
+ "sea bass",
990
+ "carp",
991
+ "cod",
992
+ "flounder",
993
+ "whiting",
994
+ "moose",
995
+ # OTHER ODDITIES
996
+ "graffiti",
997
+ "djinn",
998
+ "samuri",
999
+ "offspring",
1000
+ "pence",
1001
+ "quid",
1002
+ "hertz",
1003
+ ] + pl_sb_uninflected_s_complete
1004
+ # SOME WORDS ENDING IN ...s (OFTEN PAIRS TAKEN AS A WHOLE)
1005
+
1006
+ pl_sb_uninflected_caps = [
1007
+ # ALL NATIONALS ENDING IN -ese
1008
+ "Portuguese",
1009
+ "Amoyese",
1010
+ "Borghese",
1011
+ "Congoese",
1012
+ "Faroese",
1013
+ "Foochowese",
1014
+ "Genevese",
1015
+ "Genoese",
1016
+ "Gilbertese",
1017
+ "Hottentotese",
1018
+ "Kiplingese",
1019
+ "Kongoese",
1020
+ "Lucchese",
1021
+ "Maltese",
1022
+ "Nankingese",
1023
+ "Niasese",
1024
+ "Pekingese",
1025
+ "Piedmontese",
1026
+ "Pistoiese",
1027
+ "Sarawakese",
1028
+ "Shavese",
1029
+ "Vermontese",
1030
+ "Wenchowese",
1031
+ "Yengeese",
1032
+ ]
1033
+
1034
+
1035
+ pl_sb_uninflected_endings = [
1036
+ # UNCOUNTABLE NOUNS
1037
+ "butter",
1038
+ "cash",
1039
+ "furniture",
1040
+ "information",
1041
+ # SOME FISH AND HERD ANIMALS
1042
+ "fish",
1043
+ "deer",
1044
+ "sheep",
1045
+ # ALL NATIONALS ENDING IN -ese
1046
+ "nese",
1047
+ "rese",
1048
+ "lese",
1049
+ "mese",
1050
+ # DISEASES
1051
+ "pox",
1052
+ # OTHER ODDITIES
1053
+ "craft",
1054
+ ] + pl_sb_uninflected_s_endings
1055
+ # SOME WORDS ENDING IN ...s (OFTEN PAIRS TAKEN AS A WHOLE)
1056
+
1057
+
1058
+ pl_sb_uninflected_bysize = bysize(pl_sb_uninflected_endings)
1059
+
1060
+
1061
+ # SINGULAR WORDS ENDING IN ...s (ALL INFLECT WITH ...es)
1062
+
1063
+ pl_sb_singular_s_complete = [
1064
+ "acropolis",
1065
+ "aegis",
1066
+ "alias",
1067
+ "asbestos",
1068
+ "bathos",
1069
+ "bias",
1070
+ "bronchitis",
1071
+ "bursitis",
1072
+ "caddis",
1073
+ "cannabis",
1074
+ "canvas",
1075
+ "chaos",
1076
+ "cosmos",
1077
+ "dais",
1078
+ "digitalis",
1079
+ "epidermis",
1080
+ "ethos",
1081
+ "eyas",
1082
+ "gas",
1083
+ "glottis",
1084
+ "hubris",
1085
+ "ibis",
1086
+ "lens",
1087
+ "mantis",
1088
+ "marquis",
1089
+ "metropolis",
1090
+ "pathos",
1091
+ "pelvis",
1092
+ "polis",
1093
+ "rhinoceros",
1094
+ "sassafras",
1095
+ "trellis",
1096
+ ] + pl_sb_C_is_ides_complete
1097
+
1098
+
1099
+ pl_sb_singular_s_endings = ["ss", "us"] + pl_sb_C_is_ides_endings
1100
+
1101
+ pl_sb_singular_s_bysize = bysize(pl_sb_singular_s_endings)
1102
+
1103
+ si_sb_singular_s_complete = [f"{w}es" for w in pl_sb_singular_s_complete]
1104
+ si_sb_singular_s_endings = [f"{w}es" for w in pl_sb_singular_s_endings]
1105
+ si_sb_singular_s_bysize = bysize(si_sb_singular_s_endings)
1106
+
1107
+ pl_sb_singular_s_es = ["[A-Z].*es"]
1108
+
1109
+ pl_sb_singular_s = enclose(
1110
+ "|".join(
1111
+ pl_sb_singular_s_complete
1112
+ + [f".*{w}" for w in pl_sb_singular_s_endings]
1113
+ + pl_sb_singular_s_es
1114
+ )
1115
+ )
1116
+
1117
+
1118
+ # PLURALS ENDING IN uses -> use
1119
+
1120
+
1121
+ si_sb_ois_oi_case = ("Bolshois", "Hanois")
1122
+
1123
+ si_sb_uses_use_case = ("Betelgeuses", "Duses", "Meuses", "Syracuses", "Toulouses")
1124
+
1125
+ si_sb_uses_use = (
1126
+ "abuses",
1127
+ "applauses",
1128
+ "blouses",
1129
+ "carouses",
1130
+ "causes",
1131
+ "chartreuses",
1132
+ "clauses",
1133
+ "contuses",
1134
+ "douses",
1135
+ "excuses",
1136
+ "fuses",
1137
+ "grouses",
1138
+ "hypotenuses",
1139
+ "masseuses",
1140
+ "menopauses",
1141
+ "misuses",
1142
+ "muses",
1143
+ "overuses",
1144
+ "pauses",
1145
+ "peruses",
1146
+ "profuses",
1147
+ "recluses",
1148
+ "reuses",
1149
+ "ruses",
1150
+ "souses",
1151
+ "spouses",
1152
+ "suffuses",
1153
+ "transfuses",
1154
+ "uses",
1155
+ )
1156
+
1157
+ si_sb_ies_ie_case = (
1158
+ "Addies",
1159
+ "Aggies",
1160
+ "Allies",
1161
+ "Amies",
1162
+ "Angies",
1163
+ "Annies",
1164
+ "Annmaries",
1165
+ "Archies",
1166
+ "Arties",
1167
+ "Aussies",
1168
+ "Barbies",
1169
+ "Barries",
1170
+ "Basies",
1171
+ "Bennies",
1172
+ "Bernies",
1173
+ "Berties",
1174
+ "Bessies",
1175
+ "Betties",
1176
+ "Billies",
1177
+ "Blondies",
1178
+ "Bobbies",
1179
+ "Bonnies",
1180
+ "Bowies",
1181
+ "Brandies",
1182
+ "Bries",
1183
+ "Brownies",
1184
+ "Callies",
1185
+ "Carnegies",
1186
+ "Carries",
1187
+ "Cassies",
1188
+ "Charlies",
1189
+ "Cheries",
1190
+ "Christies",
1191
+ "Connies",
1192
+ "Curies",
1193
+ "Dannies",
1194
+ "Debbies",
1195
+ "Dixies",
1196
+ "Dollies",
1197
+ "Donnies",
1198
+ "Drambuies",
1199
+ "Eddies",
1200
+ "Effies",
1201
+ "Ellies",
1202
+ "Elsies",
1203
+ "Eries",
1204
+ "Ernies",
1205
+ "Essies",
1206
+ "Eugenies",
1207
+ "Fannies",
1208
+ "Flossies",
1209
+ "Frankies",
1210
+ "Freddies",
1211
+ "Gillespies",
1212
+ "Goldies",
1213
+ "Gracies",
1214
+ "Guthries",
1215
+ "Hallies",
1216
+ "Hatties",
1217
+ "Hetties",
1218
+ "Hollies",
1219
+ "Jackies",
1220
+ "Jamies",
1221
+ "Janies",
1222
+ "Jannies",
1223
+ "Jeanies",
1224
+ "Jeannies",
1225
+ "Jennies",
1226
+ "Jessies",
1227
+ "Jimmies",
1228
+ "Jodies",
1229
+ "Johnies",
1230
+ "Johnnies",
1231
+ "Josies",
1232
+ "Julies",
1233
+ "Kalgoorlies",
1234
+ "Kathies",
1235
+ "Katies",
1236
+ "Kellies",
1237
+ "Kewpies",
1238
+ "Kristies",
1239
+ "Laramies",
1240
+ "Lassies",
1241
+ "Lauries",
1242
+ "Leslies",
1243
+ "Lessies",
1244
+ "Lillies",
1245
+ "Lizzies",
1246
+ "Lonnies",
1247
+ "Lories",
1248
+ "Lorries",
1249
+ "Lotties",
1250
+ "Louies",
1251
+ "Mackenzies",
1252
+ "Maggies",
1253
+ "Maisies",
1254
+ "Mamies",
1255
+ "Marcies",
1256
+ "Margies",
1257
+ "Maries",
1258
+ "Marjories",
1259
+ "Matties",
1260
+ "McKenzies",
1261
+ "Melanies",
1262
+ "Mickies",
1263
+ "Millies",
1264
+ "Minnies",
1265
+ "Mollies",
1266
+ "Mounties",
1267
+ "Nannies",
1268
+ "Natalies",
1269
+ "Nellies",
1270
+ "Netties",
1271
+ "Ollies",
1272
+ "Ozzies",
1273
+ "Pearlies",
1274
+ "Pottawatomies",
1275
+ "Reggies",
1276
+ "Richies",
1277
+ "Rickies",
1278
+ "Robbies",
1279
+ "Ronnies",
1280
+ "Rosalies",
1281
+ "Rosemaries",
1282
+ "Rosies",
1283
+ "Roxies",
1284
+ "Rushdies",
1285
+ "Ruthies",
1286
+ "Sadies",
1287
+ "Sallies",
1288
+ "Sammies",
1289
+ "Scotties",
1290
+ "Selassies",
1291
+ "Sherries",
1292
+ "Sophies",
1293
+ "Stacies",
1294
+ "Stefanies",
1295
+ "Stephanies",
1296
+ "Stevies",
1297
+ "Susies",
1298
+ "Sylvies",
1299
+ "Tammies",
1300
+ "Terries",
1301
+ "Tessies",
1302
+ "Tommies",
1303
+ "Tracies",
1304
+ "Trekkies",
1305
+ "Valaries",
1306
+ "Valeries",
1307
+ "Valkyries",
1308
+ "Vickies",
1309
+ "Virgies",
1310
+ "Willies",
1311
+ "Winnies",
1312
+ "Wylies",
1313
+ "Yorkies",
1314
+ )
1315
+
1316
+ si_sb_ies_ie = (
1317
+ "aeries",
1318
+ "baggies",
1319
+ "belies",
1320
+ "biggies",
1321
+ "birdies",
1322
+ "bogies",
1323
+ "bonnies",
1324
+ "boogies",
1325
+ "bookies",
1326
+ "bourgeoisies",
1327
+ "brownies",
1328
+ "budgies",
1329
+ "caddies",
1330
+ "calories",
1331
+ "camaraderies",
1332
+ "cockamamies",
1333
+ "collies",
1334
+ "cookies",
1335
+ "coolies",
1336
+ "cooties",
1337
+ "coteries",
1338
+ "crappies",
1339
+ "curies",
1340
+ "cutesies",
1341
+ "dogies",
1342
+ "eyries",
1343
+ "floozies",
1344
+ "footsies",
1345
+ "freebies",
1346
+ "genies",
1347
+ "goalies",
1348
+ "groupies",
1349
+ "hies",
1350
+ "jalousies",
1351
+ "junkies",
1352
+ "kiddies",
1353
+ "laddies",
1354
+ "lassies",
1355
+ "lies",
1356
+ "lingeries",
1357
+ "magpies",
1358
+ "menageries",
1359
+ "mommies",
1360
+ "movies",
1361
+ "neckties",
1362
+ "newbies",
1363
+ "nighties",
1364
+ "oldies",
1365
+ "organdies",
1366
+ "overlies",
1367
+ "pies",
1368
+ "pinkies",
1369
+ "pixies",
1370
+ "potpies",
1371
+ "prairies",
1372
+ "quickies",
1373
+ "reveries",
1374
+ "rookies",
1375
+ "rotisseries",
1376
+ "softies",
1377
+ "sorties",
1378
+ "species",
1379
+ "stymies",
1380
+ "sweeties",
1381
+ "ties",
1382
+ "underlies",
1383
+ "unties",
1384
+ "veggies",
1385
+ "vies",
1386
+ "yuppies",
1387
+ "zombies",
1388
+ )
1389
+
1390
+
1391
+ si_sb_oes_oe_case = (
1392
+ "Chloes",
1393
+ "Crusoes",
1394
+ "Defoes",
1395
+ "Faeroes",
1396
+ "Ivanhoes",
1397
+ "Joes",
1398
+ "McEnroes",
1399
+ "Moes",
1400
+ "Monroes",
1401
+ "Noes",
1402
+ "Poes",
1403
+ "Roscoes",
1404
+ "Tahoes",
1405
+ "Tippecanoes",
1406
+ "Zoes",
1407
+ )
1408
+
1409
+ si_sb_oes_oe = (
1410
+ "aloes",
1411
+ "backhoes",
1412
+ "canoes",
1413
+ "does",
1414
+ "floes",
1415
+ "foes",
1416
+ "hoes",
1417
+ "mistletoes",
1418
+ "oboes",
1419
+ "pekoes",
1420
+ "roes",
1421
+ "sloes",
1422
+ "throes",
1423
+ "tiptoes",
1424
+ "toes",
1425
+ "woes",
1426
+ )
1427
+
1428
+ si_sb_z_zes = ("quartzes", "topazes")
1429
+
1430
+ si_sb_zzes_zz = ("buzzes", "fizzes", "frizzes", "razzes")
1431
+
1432
+ si_sb_ches_che_case = (
1433
+ "Andromaches",
1434
+ "Apaches",
1435
+ "Blanches",
1436
+ "Comanches",
1437
+ "Nietzsches",
1438
+ "Porsches",
1439
+ "Roches",
1440
+ )
1441
+
1442
+ si_sb_ches_che = (
1443
+ "aches",
1444
+ "avalanches",
1445
+ "backaches",
1446
+ "bellyaches",
1447
+ "caches",
1448
+ "cloches",
1449
+ "creches",
1450
+ "douches",
1451
+ "earaches",
1452
+ "fiches",
1453
+ "headaches",
1454
+ "heartaches",
1455
+ "microfiches",
1456
+ "niches",
1457
+ "pastiches",
1458
+ "psyches",
1459
+ "quiches",
1460
+ "stomachaches",
1461
+ "toothaches",
1462
+ "tranches",
1463
+ )
1464
+
1465
+ si_sb_xes_xe = ("annexes", "axes", "deluxes", "pickaxes")
1466
+
1467
+ si_sb_sses_sse_case = ("Hesses", "Jesses", "Larousses", "Matisses")
1468
+ si_sb_sses_sse = (
1469
+ "bouillabaisses",
1470
+ "crevasses",
1471
+ "demitasses",
1472
+ "impasses",
1473
+ "mousses",
1474
+ "posses",
1475
+ )
1476
+
1477
+ si_sb_ves_ve_case = (
1478
+ # *[nwl]ives -> [nwl]live
1479
+ "Clives",
1480
+ "Palmolives",
1481
+ )
1482
+ si_sb_ves_ve = (
1483
+ # *[^d]eaves -> eave
1484
+ "interweaves",
1485
+ "weaves",
1486
+ # *[nwl]ives -> [nwl]live
1487
+ "olives",
1488
+ # *[eoa]lves -> [eoa]lve
1489
+ "bivalves",
1490
+ "dissolves",
1491
+ "resolves",
1492
+ "salves",
1493
+ "twelves",
1494
+ "valves",
1495
+ )
1496
+
1497
+
1498
+ plverb_special_s = enclose(
1499
+ "|".join(
1500
+ [pl_sb_singular_s]
1501
+ + pl_sb_uninflected_s
1502
+ + list(pl_sb_irregular_s)
1503
+ + ["(.*[csx])is", "(.*)ceps", "[A-Z].*s"]
1504
+ )
1505
+ )
1506
+
1507
+ _pl_sb_postfix_adj_defn = (
1508
+ ("general", enclose(r"(?!major|lieutenant|brigadier|adjutant|.*star)\S+")),
1509
+ ("martial", enclose("court")),
1510
+ ("force", enclose("pound")),
1511
+ )
1512
+
1513
+ pl_sb_postfix_adj: Iterable[str] = (
1514
+ enclose(val + f"(?=(?:-|\\s+){key})") for key, val in _pl_sb_postfix_adj_defn
1515
+ )
1516
+
1517
+ pl_sb_postfix_adj_stems = f"({'|'.join(pl_sb_postfix_adj)})(.*)"
1518
+
1519
+
1520
+ # PLURAL WORDS ENDING IN es GO TO SINGULAR is
1521
+
1522
+ si_sb_es_is = (
1523
+ "amanuenses",
1524
+ "amniocenteses",
1525
+ "analyses",
1526
+ "antitheses",
1527
+ "apotheoses",
1528
+ "arterioscleroses",
1529
+ "atheroscleroses",
1530
+ "axes",
1531
+ # 'bases', # bases -> basis
1532
+ "catalyses",
1533
+ "catharses",
1534
+ "chasses",
1535
+ "cirrhoses",
1536
+ "cocces",
1537
+ "crises",
1538
+ "diagnoses",
1539
+ "dialyses",
1540
+ "diereses",
1541
+ "electrolyses",
1542
+ "emphases",
1543
+ "exegeses",
1544
+ "geneses",
1545
+ "halitoses",
1546
+ "hydrolyses",
1547
+ "hypnoses",
1548
+ "hypotheses",
1549
+ "hystereses",
1550
+ "metamorphoses",
1551
+ "metastases",
1552
+ "misdiagnoses",
1553
+ "mitoses",
1554
+ "mononucleoses",
1555
+ "narcoses",
1556
+ "necroses",
1557
+ "nemeses",
1558
+ "neuroses",
1559
+ "oases",
1560
+ "osmoses",
1561
+ "osteoporoses",
1562
+ "paralyses",
1563
+ "parentheses",
1564
+ "parthenogeneses",
1565
+ "periphrases",
1566
+ "photosyntheses",
1567
+ "probosces",
1568
+ "prognoses",
1569
+ "prophylaxes",
1570
+ "prostheses",
1571
+ "preces",
1572
+ "psoriases",
1573
+ "psychoanalyses",
1574
+ "psychokineses",
1575
+ "psychoses",
1576
+ "scleroses",
1577
+ "scolioses",
1578
+ "sepses",
1579
+ "silicoses",
1580
+ "symbioses",
1581
+ "synopses",
1582
+ "syntheses",
1583
+ "taxes",
1584
+ "telekineses",
1585
+ "theses",
1586
+ "thromboses",
1587
+ "tuberculoses",
1588
+ "urinalyses",
1589
+ )
1590
+
1591
+ pl_prep_list = """
1592
+ about above across after among around at athwart before behind
1593
+ below beneath beside besides between betwixt beyond but by
1594
+ during except for from in into near of off on onto out over
1595
+ since till to under until unto upon with""".split()
1596
+
1597
+ pl_prep_list_da = pl_prep_list + ["de", "du", "da"]
1598
+
1599
+ pl_prep_bysize = bysize(pl_prep_list_da)
1600
+
1601
+ pl_prep = enclose("|".join(pl_prep_list_da))
1602
+
1603
+ pl_sb_prep_dual_compound = rf"(.*?)((?:-|\s+)(?:{pl_prep})(?:-|\s+))a(?:-|\s+)(.*)"
1604
+
1605
+
1606
+ singular_pronoun_genders = {
1607
+ "neuter",
1608
+ "feminine",
1609
+ "masculine",
1610
+ "gender-neutral",
1611
+ "feminine or masculine",
1612
+ "masculine or feminine",
1613
+ }
1614
+
1615
+ pl_pron_nom = {
1616
+ # NOMINATIVE REFLEXIVE
1617
+ "i": "we",
1618
+ "myself": "ourselves",
1619
+ "you": "you",
1620
+ "yourself": "yourselves",
1621
+ "she": "they",
1622
+ "herself": "themselves",
1623
+ "he": "they",
1624
+ "himself": "themselves",
1625
+ "it": "they",
1626
+ "itself": "themselves",
1627
+ "they": "they",
1628
+ "themself": "themselves",
1629
+ # POSSESSIVE
1630
+ "mine": "ours",
1631
+ "yours": "yours",
1632
+ "hers": "theirs",
1633
+ "his": "theirs",
1634
+ "its": "theirs",
1635
+ "theirs": "theirs",
1636
+ }
1637
+
1638
+ si_pron: Dict[str, Dict[str, Union[str, Dict[str, str]]]] = {
1639
+ "nom": {v: k for (k, v) in pl_pron_nom.items()}
1640
+ }
1641
+ si_pron["nom"]["we"] = "I"
1642
+
1643
+
1644
+ pl_pron_acc = {
1645
+ # ACCUSATIVE REFLEXIVE
1646
+ "me": "us",
1647
+ "myself": "ourselves",
1648
+ "you": "you",
1649
+ "yourself": "yourselves",
1650
+ "her": "them",
1651
+ "herself": "themselves",
1652
+ "him": "them",
1653
+ "himself": "themselves",
1654
+ "it": "them",
1655
+ "itself": "themselves",
1656
+ "them": "them",
1657
+ "themself": "themselves",
1658
+ }
1659
+
1660
+ pl_pron_acc_keys = enclose("|".join(pl_pron_acc))
1661
+ pl_pron_acc_keys_bysize = bysize(pl_pron_acc)
1662
+
1663
+ si_pron["acc"] = {v: k for (k, v) in pl_pron_acc.items()}
1664
+
1665
+ for _thecase, _plur, _gend, _sing in (
1666
+ ("nom", "they", "neuter", "it"),
1667
+ ("nom", "they", "feminine", "she"),
1668
+ ("nom", "they", "masculine", "he"),
1669
+ ("nom", "they", "gender-neutral", "they"),
1670
+ ("nom", "they", "feminine or masculine", "she or he"),
1671
+ ("nom", "they", "masculine or feminine", "he or she"),
1672
+ ("nom", "themselves", "neuter", "itself"),
1673
+ ("nom", "themselves", "feminine", "herself"),
1674
+ ("nom", "themselves", "masculine", "himself"),
1675
+ ("nom", "themselves", "gender-neutral", "themself"),
1676
+ ("nom", "themselves", "feminine or masculine", "herself or himself"),
1677
+ ("nom", "themselves", "masculine or feminine", "himself or herself"),
1678
+ ("nom", "theirs", "neuter", "its"),
1679
+ ("nom", "theirs", "feminine", "hers"),
1680
+ ("nom", "theirs", "masculine", "his"),
1681
+ ("nom", "theirs", "gender-neutral", "theirs"),
1682
+ ("nom", "theirs", "feminine or masculine", "hers or his"),
1683
+ ("nom", "theirs", "masculine or feminine", "his or hers"),
1684
+ ("acc", "them", "neuter", "it"),
1685
+ ("acc", "them", "feminine", "her"),
1686
+ ("acc", "them", "masculine", "him"),
1687
+ ("acc", "them", "gender-neutral", "them"),
1688
+ ("acc", "them", "feminine or masculine", "her or him"),
1689
+ ("acc", "them", "masculine or feminine", "him or her"),
1690
+ ("acc", "themselves", "neuter", "itself"),
1691
+ ("acc", "themselves", "feminine", "herself"),
1692
+ ("acc", "themselves", "masculine", "himself"),
1693
+ ("acc", "themselves", "gender-neutral", "themself"),
1694
+ ("acc", "themselves", "feminine or masculine", "herself or himself"),
1695
+ ("acc", "themselves", "masculine or feminine", "himself or herself"),
1696
+ ):
1697
+ try:
1698
+ si_pron[_thecase][_plur][_gend] = _sing # type: ignore
1699
+ except TypeError:
1700
+ si_pron[_thecase][_plur] = {}
1701
+ si_pron[_thecase][_plur][_gend] = _sing # type: ignore
1702
+
1703
+
1704
+ si_pron_acc_keys = enclose("|".join(si_pron["acc"]))
1705
+ si_pron_acc_keys_bysize = bysize(si_pron["acc"])
1706
+
1707
+
1708
+ def get_si_pron(thecase, word, gender) -> str:
1709
+ try:
1710
+ sing = si_pron[thecase][word]
1711
+ except KeyError:
1712
+ raise # not a pronoun
1713
+ try:
1714
+ return sing[gender] # has several types due to gender
1715
+ except TypeError:
1716
+ return cast(str, sing) # answer independent of gender
1717
+
1718
+
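# [Editor's sketch, not part of the package diff.] The loop above rewrites the
# gender-dependent entries of si_pron into per-gender dicts, and get_si_pron()
# then disambiguates them; a few illustrative lookups (doctest style):
#
#   >>> get_si_pron("acc", "them", "feminine")
#   'her'
#   >>> get_si_pron("nom", "they", "neuter")
#   'it'
#   >>> get_si_pron("acc", "us", "masculine")   # entry independent of gender
#   'me'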
1719
+ # These dictionaries group verbs by first, second and third person
1720
+ # conjugations.
1721
+
1722
+ plverb_irregular_pres = {
1723
+ "am": "are",
1724
+ "are": "are",
1725
+ "is": "are",
1726
+ "was": "were",
1727
+ "were": "were",
1728
+ "have": "have",
1729
+ "has": "have",
1730
+ "do": "do",
1731
+ "does": "do",
1732
+ }
1733
+
1734
+ plverb_ambiguous_pres = {
1735
+ "act": "act",
1736
+ "acts": "act",
1737
+ "blame": "blame",
1738
+ "blames": "blame",
1739
+ "can": "can",
1740
+ "must": "must",
1741
+ "fly": "fly",
1742
+ "flies": "fly",
1743
+ "copy": "copy",
1744
+ "copies": "copy",
1745
+ "drink": "drink",
1746
+ "drinks": "drink",
1747
+ "fight": "fight",
1748
+ "fights": "fight",
1749
+ "fire": "fire",
1750
+ "fires": "fire",
1751
+ "like": "like",
1752
+ "likes": "like",
1753
+ "look": "look",
1754
+ "looks": "look",
1755
+ "make": "make",
1756
+ "makes": "make",
1757
+ "reach": "reach",
1758
+ "reaches": "reach",
1759
+ "run": "run",
1760
+ "runs": "run",
1761
+ "sink": "sink",
1762
+ "sinks": "sink",
1763
+ "sleep": "sleep",
1764
+ "sleeps": "sleep",
1765
+ "view": "view",
1766
+ "views": "view",
1767
+ }
1768
+
1769
+ plverb_ambiguous_pres_keys = re.compile(
1770
+ rf"^({enclose('|'.join(plverb_ambiguous_pres))})((\s.*)?)$", re.IGNORECASE
1771
+ )
1772
+
1773
+
1774
+ plverb_irregular_non_pres = (
1775
+ "did",
1776
+ "had",
1777
+ "ate",
1778
+ "made",
1779
+ "put",
1780
+ "spent",
1781
+ "fought",
1782
+ "sank",
1783
+ "gave",
1784
+ "sought",
1785
+ "shall",
1786
+ "could",
1787
+ "ought",
1788
+ "should",
1789
+ )
1790
+
1791
+ plverb_ambiguous_non_pres = re.compile(
1792
+ r"^((?:thought|saw|bent|will|might|cut))((\s.*)?)$", re.IGNORECASE
1793
+ )
1794
+
1795
+ # "..oes" -> "..oe" (the rest are "..oes" -> "o")
1796
+
1797
+ pl_v_oes_oe = ("canoes", "floes", "oboes", "roes", "throes", "woes")
1798
+ pl_v_oes_oe_endings_size4 = ("hoes", "toes")
1799
+ pl_v_oes_oe_endings_size5 = ("shoes",)
1800
+
1801
+
1802
+ pl_count_zero = ("0", "no", "zero", "nil")
1803
+
1804
+
1805
+ pl_count_one = ("1", "a", "an", "one", "each", "every", "this", "that")
1806
+
1807
+ pl_adj_special = {"a": "some", "an": "some", "this": "these", "that": "those"}
1808
+
1809
+ pl_adj_special_keys = re.compile(
1810
+ rf"^({enclose('|'.join(pl_adj_special))})$", re.IGNORECASE
1811
+ )
1812
+
1813
+ pl_adj_poss = {
1814
+ "my": "our",
1815
+ "your": "your",
1816
+ "its": "their",
1817
+ "her": "their",
1818
+ "his": "their",
1819
+ "their": "their",
1820
+ }
1821
+
1822
+ pl_adj_poss_keys = re.compile(rf"^({enclose('|'.join(pl_adj_poss))})$", re.IGNORECASE)
1823
+
1824
+
1825
+ # 2. INDEFINITE ARTICLES
1826
+
1827
+ # THIS PATTERN MATCHES STRINGS OF CAPITALS STARTING WITH A "VOWEL-SOUND"
1828
+ # CONSONANT FOLLOWED BY ANOTHER CONSONANT, AND WHICH ARE NOT LIKELY
1829
+ # TO BE REAL WORDS (OH, ALL RIGHT THEN, IT'S JUST MAGIC!)
1830
+
1831
+ A_abbrev = re.compile(
1832
+ r"""
1833
+ ^(?! FJO | [HLMNS]Y. | RY[EO] | SQU
1834
+ | ( F[LR]? | [HL] | MN? | N | RH? | S[CHKLMNPTVW]? | X(YL)?) [AEIOU])
1835
+ [FHLMNRSX][A-Z]
1836
+ """,
1837
+ re.VERBOSE,
1838
+ )
1839
+
1840
+ # THIS PATTERN CODES THE BEGINNINGS OF ALL ENGLISH WORDS BEGINNING WITH A
1841
+ # 'y' FOLLOWED BY A CONSONANT. ANY OTHER Y-CONSONANT PREFIX THEREFORE
1842
+ # IMPLIES AN ABBREVIATION.
1843
+
1844
+ A_y_cons = re.compile(r"^(y(b[lor]|cl[ea]|fere|gg|p[ios]|rou|tt))", re.IGNORECASE)
1845
+
1846
+ # EXCEPTIONS TO EXCEPTIONS
1847
+
1848
+ A_explicit_a = re.compile(r"^((?:unabomber|unanimous|US))", re.IGNORECASE)
1849
+
1850
+ A_explicit_an = re.compile(
1851
+ r"^((?:euler|hour(?!i)|heir|honest|hono[ur]|mpeg))", re.IGNORECASE
1852
+ )
1853
+
1854
+ A_ordinal_an = re.compile(r"^([aefhilmnorsx]-?th)", re.IGNORECASE)
1855
+
1856
+ A_ordinal_a = re.compile(r"^([bcdgjkpqtuvwyz]-?th)", re.IGNORECASE)
1857
+
1858
+
1859
+ # NUMERICAL INFLECTIONS
1860
+
1861
+ nth = {
1862
+ 0: "th",
1863
+ 1: "st",
1864
+ 2: "nd",
1865
+ 3: "rd",
1866
+ 4: "th",
1867
+ 5: "th",
1868
+ 6: "th",
1869
+ 7: "th",
1870
+ 8: "th",
1871
+ 9: "th",
1872
+ 11: "th",
1873
+ 12: "th",
1874
+ 13: "th",
1875
+ }
1876
+ nth_suff = set(nth.values())
1877
+
1878
+ ordinal = dict(
1879
+ ty="tieth",
1880
+ one="first",
1881
+ two="second",
1882
+ three="third",
1883
+ five="fifth",
1884
+ eight="eighth",
1885
+ nine="ninth",
1886
+ twelve="twelfth",
1887
+ )
1888
+
1889
+ ordinal_suff = re.compile(rf"({'|'.join(ordinal)})\Z")
1890
+
1891
+
1892
+ # NUMBERS
1893
+
1894
+ unit = ["", "one", "two", "three", "four", "five", "six", "seven", "eight", "nine"]
1895
+ teen = [
1896
+ "ten",
1897
+ "eleven",
1898
+ "twelve",
1899
+ "thirteen",
1900
+ "fourteen",
1901
+ "fifteen",
1902
+ "sixteen",
1903
+ "seventeen",
1904
+ "eighteen",
1905
+ "nineteen",
1906
+ ]
1907
+ ten = [
1908
+ "",
1909
+ "",
1910
+ "twenty",
1911
+ "thirty",
1912
+ "forty",
1913
+ "fifty",
1914
+ "sixty",
1915
+ "seventy",
1916
+ "eighty",
1917
+ "ninety",
1918
+ ]
1919
+ mill = [
1920
+ " ",
1921
+ " thousand",
1922
+ " million",
1923
+ " billion",
1924
+ " trillion",
1925
+ " quadrillion",
1926
+ " quintillion",
1927
+ " sextillion",
1928
+ " septillion",
1929
+ " octillion",
1930
+ " nonillion",
1931
+ " decillion",
1932
+ ]
1933
+
1934
+
1935
+ # SUPPORT CLASSICAL PLURALIZATIONS
1936
+
1937
+ def_classical = dict(
1938
+ all=False, zero=False, herd=False, names=True, persons=False, ancient=False
1939
+ )
1940
+
1941
+ all_classical = {k: True for k in def_classical}
1942
+ no_classical = {k: False for k in def_classical}
1943
+
1944
+
1945
+ # Maps strings to built-in constant types
1946
+ string_to_constant = {"True": True, "False": False, "None": None}
1947
+
1948
+
1949
+ # Pre-compiled regular expression objects
1950
+ DOLLAR_DIGITS = re.compile(r"\$(\d+)")
1951
+ FUNCTION_CALL = re.compile(r"((\w+)\([^)]*\)*)", re.IGNORECASE)
1952
+ PARTITION_WORD = re.compile(r"\A(\s*)(.+?)(\s*)\Z")
1953
+ PL_SB_POSTFIX_ADJ_STEMS_RE = re.compile(
1954
+ rf"^(?:{pl_sb_postfix_adj_stems})$", re.IGNORECASE
1955
+ )
1956
+ PL_SB_PREP_DUAL_COMPOUND_RE = re.compile(
1957
+ rf"^(?:{pl_sb_prep_dual_compound})$", re.IGNORECASE
1958
+ )
1959
+ DENOMINATOR = re.compile(r"(?P<denominator>.+)( (per|a) .+)")
1960
+ PLVERB_SPECIAL_S_RE = re.compile(rf"^({plverb_special_s})$")
1961
+ WHITESPACE = re.compile(r"\s")
1962
+ ENDS_WITH_S = re.compile(r"^(.*[^s])s$", re.IGNORECASE)
1963
+ ENDS_WITH_APOSTROPHE_S = re.compile(r"^(.*)'s?$")
1964
+ INDEFINITE_ARTICLE_TEST = re.compile(r"\A(\s*)(?:an?\s+)?(.+?)(\s*)\Z", re.IGNORECASE)
1965
+ SPECIAL_AN = re.compile(r"^[aefhilmnorsx]$", re.IGNORECASE)
1966
+ SPECIAL_A = re.compile(r"^[bcdgjkpqtuvwyz]$", re.IGNORECASE)
1967
+ SPECIAL_ABBREV_AN = re.compile(r"^[aefhilmnorsx][.-]", re.IGNORECASE)
1968
+ SPECIAL_ABBREV_A = re.compile(r"^[a-z][.-]", re.IGNORECASE)
1969
+ CONSONANTS = re.compile(r"^[^aeiouy]", re.IGNORECASE)
1970
+ ARTICLE_SPECIAL_EU = re.compile(r"^e[uw]", re.IGNORECASE)
1971
+ ARTICLE_SPECIAL_ONCE = re.compile(r"^onc?e\b", re.IGNORECASE)
1972
+ ARTICLE_SPECIAL_ONETIME = re.compile(r"^onetime\b", re.IGNORECASE)
1973
+ ARTICLE_SPECIAL_UNIT = re.compile(r"^uni([^nmd]|mo)", re.IGNORECASE)
1974
+ ARTICLE_SPECIAL_UBA = re.compile(r"^u[bcfghjkqrst][aeiou]", re.IGNORECASE)
1975
+ ARTICLE_SPECIAL_UKR = re.compile(r"^ukr", re.IGNORECASE)
1976
+ SPECIAL_CAPITALS = re.compile(r"^U[NK][AIEO]?")
1977
+ VOWELS = re.compile(r"^[aeiou]", re.IGNORECASE)
1978
+
1979
+ DIGIT_GROUP = re.compile(r"(\d)")
1980
+ TWO_DIGITS = re.compile(r"(\d)(\d)")
1981
+ THREE_DIGITS = re.compile(r"(\d)(\d)(\d)")
1982
+ THREE_DIGITS_WORD = re.compile(r"(\d)(\d)(\d)(?=\D*\Z)")
1983
+ TWO_DIGITS_WORD = re.compile(r"(\d)(\d)(?=\D*\Z)")
1984
+ ONE_DIGIT_WORD = re.compile(r"(\d)(?=\D*\Z)")
1985
+
1986
+ FOUR_DIGIT_COMMA = re.compile(r"(\d)(\d{3}(?:,|\Z))")
1987
+ NON_DIGIT = re.compile(r"\D")
1988
+ WHITESPACES_COMMA = re.compile(r"\s+,")
1989
+ COMMA_WORD = re.compile(r", (\S+)\s+\Z")
1990
+ WHITESPACES = re.compile(r"\s+")
1991
+
1992
+
1993
+ PRESENT_PARTICIPLE_REPLACEMENTS = (
1994
+ (re.compile(r"ie$"), r"y"),
1995
+ (
1996
+ re.compile(r"ue$"),
1997
+ r"u",
1998
+ ), # TODO: isn't ue$ -> u encompassed in the following rule?
1999
+ (re.compile(r"([auy])e$"), r"\g<1>"),
2000
+ (re.compile(r"ski$"), r"ski"),
2001
+ (re.compile(r"[^b]i$"), r""),
2002
+ (re.compile(r"^(are|were)$"), r"be"),
2003
+ (re.compile(r"^(had)$"), r"hav"),
2004
+ (re.compile(r"^(hoe)$"), r"\g<1>"),
2005
+ (re.compile(r"([^e])e$"), r"\g<1>"),
2006
+ (re.compile(r"er$"), r"er"),
2007
+ (re.compile(r"([^aeiou][aeiouy]([bdgmnprst]))$"), r"\g<1>\g<2>"),
2008
+ )
2009
+
2010
+ DIGIT = re.compile(r"\d")
2011
+
2012
+
2013
+ class Words(str):
2014
+ lowered: str
2015
+ split_: List[str]
2016
+ first: str
2017
+ last: str
2018
+
2019
+ def __init__(self, orig) -> None:
2020
+ self.lowered = self.lower()
2021
+ self.split_ = self.split()
2022
+ self.first = self.split_[0]
2023
+ self.last = self.split_[-1]
2024
+
2025
+
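# [Editor's sketch, not part of the package diff.] Words caches the lowered,
# split, first and last forms of the string it wraps, which the inflection
# routines below consult repeatedly:
#
#   >>> w = Words("Governor General")
#   >>> w.first, w.last, w.lowered
#   ('Governor', 'General', 'governor general')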
2026
+ Falsish = Any # ideally, falsish would only validate on bool(value) is False
2027
+
2028
+
2029
+ _STATIC_TYPE_CHECKING = TYPE_CHECKING
2030
+ # ^-- Workaround for typeguard AST manipulation:
2031
+ # https://github.com/agronholm/typeguard/issues/353#issuecomment-1556306554
2032
+
2033
+ if _STATIC_TYPE_CHECKING: # pragma: no cover
2034
+ Word = Annotated[str, "String with at least 1 character"]
2035
+ else:
2036
+
2037
+ class _WordMeta(type): # Too dynamic to be supported by mypy...
2038
+ def __instancecheck__(self, instance: Any) -> bool:
2039
+ return isinstance(instance, str) and len(instance) >= 1
2040
+
2041
+ class Word(metaclass=_WordMeta): # type: ignore[no-redef]
2042
+ """String with at least 1 character"""
2043
+
2044
+
2045
+ class engine:
2046
+ def __init__(self) -> None:
2047
+ self.classical_dict = def_classical.copy()
2048
+ self.persistent_count: Optional[int] = None
2049
+ self.mill_count = 0
2050
+ self.pl_sb_user_defined: List[Optional[Word]] = []
2051
+ self.pl_v_user_defined: List[Optional[Word]] = []
2052
+ self.pl_adj_user_defined: List[Optional[Word]] = []
2053
+ self.si_sb_user_defined: List[Optional[Word]] = []
2054
+ self.A_a_user_defined: List[Optional[Word]] = []
2055
+ self.thegender = "neuter"
2056
+ self.__number_args: Optional[Dict[str, str]] = None
2057
+
2058
+ @property
2059
+ def _number_args(self):
2060
+ return cast(Dict[str, str], self.__number_args)
2061
+
2062
+ @_number_args.setter
2063
+ def _number_args(self, val):
2064
+ self.__number_args = val
2065
+
2066
+ @typechecked
2067
+ def defnoun(self, singular: Optional[Word], plural: Optional[Word]) -> int:
2068
+ """
2069
+ Set the noun plural of singular to plural.
2070
+
2071
+ """
2072
+ self.checkpat(singular)
2073
+ self.checkpatplural(plural)
2074
+ self.pl_sb_user_defined.extend((singular, plural))
2075
+ self.si_sb_user_defined.extend((plural, singular))
2076
+ return 1
2077
+
2078
+ @typechecked
2079
+ def defverb(
2080
+ self,
2081
+ s1: Optional[Word],
2082
+ p1: Optional[Word],
2083
+ s2: Optional[Word],
2084
+ p2: Optional[Word],
2085
+ s3: Optional[Word],
2086
+ p3: Optional[Word],
2087
+ ) -> int:
2088
+ """
2089
+ Set the verb plurals for s1, s2 and s3 to p1, p2 and p3 respectively.
2090
+
2091
+ Where 1, 2 and 3 represent the 1st, 2nd and 3rd person forms of the verb.
2092
+
2093
+ """
2094
+ self.checkpat(s1)
2095
+ self.checkpat(s2)
2096
+ self.checkpat(s3)
2097
+ self.checkpatplural(p1)
2098
+ self.checkpatplural(p2)
2099
+ self.checkpatplural(p3)
2100
+ self.pl_v_user_defined.extend((s1, p1, s2, p2, s3, p3))
2101
+ return 1
2102
+
2103
+ @typechecked
2104
+ def defadj(self, singular: Optional[Word], plural: Optional[Word]) -> int:
2105
+ """
2106
+ Set the adjective plural of singular to plural.
2107
+
2108
+ """
2109
+ self.checkpat(singular)
2110
+ self.checkpatplural(plural)
2111
+ self.pl_adj_user_defined.extend((singular, plural))
2112
+ return 1
2113
+
2114
+ @typechecked
2115
+ def defa(self, pattern: Optional[Word]) -> int:
2116
+ """
2117
+ Define the indefinite article as 'a' for words matching pattern.
2118
+
2119
+ """
2120
+ self.checkpat(pattern)
2121
+ self.A_a_user_defined.extend((pattern, "a"))
2122
+ return 1
2123
+
2124
+ @typechecked
2125
+ def defan(self, pattern: Optional[Word]) -> int:
2126
+ """
2127
+ Define the indefinite article as 'an' for words matching pattern.
2128
+
2129
+ """
2130
+ self.checkpat(pattern)
2131
+ self.A_a_user_defined.extend((pattern, "an"))
2132
+ return 1
2133
+
2134
+ def checkpat(self, pattern: Optional[Word]) -> None:
2135
+ """
2136
+ check for errors in a regex pattern
2137
+ """
2138
+ if pattern is None:
2139
+ return
2140
+ try:
2141
+ re.match(pattern, "")
2142
+ except re.error as err:
2143
+ raise BadUserDefinedPatternError(pattern) from err
2144
+
2145
+ def checkpatplural(self, pattern: Optional[Word]) -> None:
2146
+ """
2147
+ check for errors in a regex replace pattern
2148
+ """
2149
+ return
2150
+
2151
+ @typechecked
2152
+ def ud_match(self, word: Word, wordlist: Sequence[Optional[Word]]) -> Optional[str]:
2153
+ for i in range(len(wordlist) - 2, -2, -2): # backwards through even elements
2154
+ mo = re.search(rf"^{wordlist[i]}$", word, re.IGNORECASE)
2155
+ if mo:
2156
+ if wordlist[i + 1] is None:
2157
+ return None
2158
+ pl = DOLLAR_DIGITS.sub(
2159
+ r"\\1", cast(Word, wordlist[i + 1])
2160
+ ) # change $n to \n for expand
2161
+ return mo.expand(pl)
2162
+ return None
2163
+
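# [Editor's sketch, not part of the package diff.] How the user-defined hooks
# above feed ud_match(); "wug"/"wugen" is an invented word pair used purely
# for illustration and is assumed absent from the built-in tables:
#
#   >>> p = engine()
#   >>> p.defnoun("wug", "wugen")
#   1
#   >>> p.plural_noun("wug")        # resolved via pl_sb_user_defined
#   'wugen'
#   >>> p.singular_noun("wugen")    # the reverse pair is stored as well
#   'wug'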
2164
+ def classical(self, **kwargs) -> None:
2165
+ """
2166
+ turn classical mode on and off for various categories
2167
+
2168
+ turn on all classical modes:
2169
+ classical()
2170
+ classical(all=True)
2171
+
2172
+ turn on or off specific classical modes:
2173
+ e.g.
2174
+ classical(herd=True)
2175
+ classical(names=False)
2176
+
2177
+ By default all classical modes are off except names.
2178
+
2179
+ unknown value in args or key in kwargs raises
2180
+ exception: UnknownClassicalModeError
2181
+
2182
+ """
2183
+ if not kwargs:
2184
+ self.classical_dict = all_classical.copy()
2185
+ return
2186
+ if "all" in kwargs:
2187
+ if kwargs["all"]:
2188
+ self.classical_dict = all_classical.copy()
2189
+ else:
2190
+ self.classical_dict = no_classical.copy()
2191
+
2192
+ for k, v in kwargs.items():
2193
+ if k in def_classical:
2194
+ self.classical_dict[k] = v
2195
+ else:
2196
+ raise UnknownClassicalModeError
2197
+
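# [Editor's sketch, not part of the package diff.] Two of the classical flags
# in action; assumes "chairperson" and the invented name "Tavy" do not appear
# in the irregular or uninflected tables defined elsewhere in this file:
#
#   >>> p = engine()
#   >>> p.plural_noun("chairperson")
#   'chairpeople'
#   >>> p.classical(persons=True)
#   >>> p.plural_noun("chairperson")
#   'chairpersons'
#   >>> p.classical(names=False)
#   >>> p.plural_noun("Tavy")       # proper-noun-in-y handling switched off
#   'Tavies'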
2198
+ def num(
2199
+ self, count: Optional[int] = None, show: Optional[int] = None
2200
+ ) -> str: # (;$count,$show)
2201
+ """
2202
+ Set the number to be used in other method calls.
2203
+
2204
+ Returns count.
2205
+
2206
+ Set show to False to return '' instead.
2207
+
2208
+ """
2209
+ if count is not None:
2210
+ try:
2211
+ self.persistent_count = int(count)
2212
+ except ValueError as err:
2213
+ raise BadNumValueError from err
2214
+ if (show is None) or show:
2215
+ return str(count)
2216
+ else:
2217
+ self.persistent_count = None
2218
+ return ""
2219
+
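# [Editor's sketch, not part of the package diff.] num() stores a persistent
# count that get_count() consults later; "wug" is an invented regular noun:
#
#   >>> p = engine()
#   >>> p.num(1)
#   '1'
#   >>> p.plural_noun("wug")        # persistent count of 1 suppresses inflection
#   'wug'
#   >>> p.num()                     # calling with no argument clears the count
#   ''
#   >>> p.plural_noun("wug")
#   'wugs'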
2220
+ def gender(self, gender: str) -> None:
2221
+ """
2222
+ set the gender for the singular of plural pronouns
2223
+
2224
+ can be one of:
2225
+ 'neuter' ('they' -> 'it')
2226
+ 'feminine' ('they' -> 'she')
2227
+ 'masculine' ('they' -> 'he')
2228
+ 'gender-neutral' ('they' -> 'they')
2229
+ 'feminine or masculine' ('they' -> 'she or he')
2230
+ 'masculine or feminine' ('they' -> 'he or she')
2231
+ """
2232
+ if gender in singular_pronoun_genders:
2233
+ self.thegender = gender
2234
+ else:
2235
+ raise BadGenderError
2236
+
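# [Editor's sketch, not part of the package diff.] The selected gender only
# affects the singularization of plural pronouns (see singular_noun further
# down and the si_pron table above):
#
#   >>> p = engine()
#   >>> p.gender("feminine")
#   >>> p.singular_noun("they")
#   'she'
#   >>> p.singular_noun("them")
#   'her'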
2237
+ def _get_value_from_ast(self, obj):
2238
+ """
2239
+ Return the value of the ast object.
2240
+ """
2241
+ if isinstance(obj, ast.Num):
2242
+ return obj.n
2243
+ elif isinstance(obj, ast.Str):
2244
+ return obj.s
2245
+ elif isinstance(obj, ast.List):
2246
+ return [self._get_value_from_ast(e) for e in obj.elts]
2247
+ elif isinstance(obj, ast.Tuple):
2248
+ return tuple([self._get_value_from_ast(e) for e in obj.elts])
2249
+
2250
+ # None, True and False are NameConstants in Py3.4 and above.
2251
+ elif isinstance(obj, ast.NameConstant):
2252
+ return obj.value
2253
+
2254
+ # Probably passed a variable name.
2255
+ # Or passed a single word without wrapping it in quotes as an argument
2256
+ # ex: p.inflect("I plural(see)") instead of p.inflect("I plural('see')")
2257
+ raise NameError(f"name '{obj.id}' is not defined")
2258
+
2259
+ def _string_to_substitute(
2260
+ self, mo: Match, methods_dict: Dict[str, Callable]
2261
+ ) -> str:
2262
+ """
2263
+ Return the string to be substituted for the match.
2264
+ """
2265
+ matched_text, f_name = mo.groups()
2266
+ # matched_text is the complete match string. e.g. plural_noun(cat)
2267
+ # f_name is the function name. e.g. plural_noun
2268
+
2269
+ # Return matched_text if function name is not in methods_dict
2270
+ if f_name not in methods_dict:
2271
+ return matched_text
2272
+
2273
+ # Parse the matched text
2274
+ a_tree = ast.parse(matched_text)
2275
+
2276
+ # get the args and kwargs from ast objects
2277
+ args_list = [
2278
+ self._get_value_from_ast(a)
2279
+ for a in a_tree.body[0].value.args # type: ignore[attr-defined]
2280
+ ]
2281
+ kwargs_list = {
2282
+ kw.arg: self._get_value_from_ast(kw.value)
2283
+ for kw in a_tree.body[0].value.keywords # type: ignore[attr-defined]
2284
+ }
2285
+
2286
+ # Call the corresponding function
2287
+ return methods_dict[f_name](*args_list, **kwargs_list)
2288
+
2289
+ # 0. PERFORM GENERAL INFLECTIONS IN A STRING
2290
+
2291
+ @typechecked
2292
+ def inflect(self, text: Word) -> str:
2293
+ """
2294
+ Perform inflections in a string.
2295
+
2296
+ e.g. inflect('The plural of cat is plural(cat)') returns
2297
+ 'The plural of cat is cats'
2298
+
2299
+ can use plural, plural_noun, plural_verb, plural_adj,
2300
+ singular_noun, a, an, no, ordinal, number_to_words,
2301
+ and prespart
2302
+
2303
+ """
2304
+ save_persistent_count = self.persistent_count
2305
+
2306
+ # Dictionary of allowed methods
2307
+ methods_dict: Dict[str, Callable] = {
2308
+ "plural": self.plural,
2309
+ "plural_adj": self.plural_adj,
2310
+ "plural_noun": self.plural_noun,
2311
+ "plural_verb": self.plural_verb,
2312
+ "singular_noun": self.singular_noun,
2313
+ "a": self.a,
2314
+ "an": self.a,
2315
+ "no": self.no,
2316
+ "ordinal": self.ordinal,
2317
+ "number_to_words": self.number_to_words,
2318
+ "present_participle": self.present_participle,
2319
+ "num": self.num,
2320
+ }
2321
+
2322
+ # Regular expression to find Python's function call syntax
2323
+ output = FUNCTION_CALL.sub(
2324
+ lambda mo: self._string_to_substitute(mo, methods_dict), text
2325
+ )
2326
+ self.persistent_count = save_persistent_count
2327
+ return output
2328
+
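# [Editor's sketch, not part of the package diff.] inflect() rewrites embedded
# function-call syntax using the methods_dict above; arguments must be quoted
# so that _get_value_from_ast() sees string constants:
#
#   >>> p = engine()
#   >>> p.inflect("plural('cat')")
#   'cats'
#   >>> p.inflect("num(2) There are plural_noun('cat')")
#   '2 There are cats'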
2329
+ # ## PLURAL SUBROUTINES
2330
+
2331
+ def postprocess(self, orig: str, inflected) -> str:
2332
+ inflected = str(inflected)
2333
+ if "|" in inflected:
2334
+ word_options = inflected.split("|")
2335
+ # When two parts of a noun need to be pluralized
2336
+ if len(word_options[0].split(" ")) == len(word_options[1].split(" ")):
2337
+ result = inflected.split("|")[self.classical_dict["all"]].split(" ")
2338
+ # When only the last part of the noun needs to be pluralized
2339
+ else:
2340
+ result = inflected.split(" ")
2341
+ for index, word in enumerate(result):
2342
+ if "|" in word:
2343
+ result[index] = word.split("|")[self.classical_dict["all"]]
2344
+ else:
2345
+ result = inflected.split(" ")
2346
+
2347
+ # Try to fix word wise capitalization
2348
+ for index, word in enumerate(orig.split(" ")):
2349
+ if word == "I":
2350
+ # Is this the only word for exceptions like this
2351
+ # Where the original is fully capitalized
2352
+ # without 'meaning' capitalization?
2353
+ # Also this fails to handle a capitalization in context
2354
+ continue
2355
+ if word.capitalize() == word:
2356
+ result[index] = result[index].capitalize()
2357
+ if word == word.upper():
2358
+ result[index] = result[index].upper()
2359
+ return " ".join(result)
2360
+
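# [Editor's sketch, not part of the package diff.] postprocess() picks between
# "modern|classical" alternatives using the boolean 'all' flag as an index and
# then copies the original word's capitalization; "cows|kine" is passed in
# directly here only to illustrate the format of such alternatives:
#
#   >>> p = engine()
#   >>> p.postprocess("cow", "cows|kine")
#   'cows'
#   >>> p.classical(all=True)
#   >>> p.postprocess("Cow", "cows|kine")
#   'Kine'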
2361
+ def partition_word(self, text: str) -> Tuple[str, str, str]:
2362
+ mo = PARTITION_WORD.search(text)
2363
+ if mo:
2364
+ return mo.group(1), mo.group(2), mo.group(3)
2365
+ else:
2366
+ return "", "", ""
2367
+
2368
+ @typechecked
2369
+ def plural(self, text: Word, count: Optional[Union[str, int, Any]] = None) -> str:
2370
+ """
2371
+ Return the plural of text.
2372
+
2373
+ If count supplied, then return text if count is one of:
2374
+ 1, a, an, one, each, every, this, that
2375
+
2376
+ otherwise return the plural.
2377
+
2378
+ Whitespace at the start and end is preserved.
2379
+
2380
+ """
2381
+ pre, word, post = self.partition_word(text)
2382
+ if not word:
2383
+ return text
2384
+ plural = self.postprocess(
2385
+ word,
2386
+ self._pl_special_adjective(word, count)
2387
+ or self._pl_special_verb(word, count)
2388
+ or self._plnoun(word, count),
2389
+ )
2390
+ return f"{pre}{plural}{post}"
2391
+
2392
+ @typechecked
2393
+ def plural_noun(
2394
+ self, text: Word, count: Optional[Union[str, int, Any]] = None
2395
+ ) -> str:
2396
+ """
2397
+ Return the plural of text, where text is a noun.
2398
+
2399
+ If count supplied, then return text if count is one of:
2400
+ 1, a, an, one, each, every, this, that
2401
+
2402
+ otherwise return the plural.
2403
+
2404
+ Whitespace at the start and end is preserved.
2405
+
2406
+ """
2407
+ pre, word, post = self.partition_word(text)
2408
+ if not word:
2409
+ return text
2410
+ plural = self.postprocess(word, self._plnoun(word, count))
2411
+ return f"{pre}{plural}{post}"
2412
+
2413
+ @typechecked
2414
+ def plural_verb(
2415
+ self, text: Word, count: Optional[Union[str, int, Any]] = None
2416
+ ) -> str:
2417
+ """
2418
+ Return the plural of text, where text is a verb.
2419
+
2420
+ If count supplied, then return text if count is one of:
2421
+ 1, a, an, one, each, every, this, that
2422
+
2423
+ otherwise return the plural.
2424
+
2425
+ Whitespace at the start and end is preserved.
2426
+
2427
+ """
2428
+ pre, word, post = self.partition_word(text)
2429
+ if not word:
2430
+ return text
2431
+ plural = self.postprocess(
2432
+ word,
2433
+ self._pl_special_verb(word, count) or self._pl_general_verb(word, count),
2434
+ )
2435
+ return f"{pre}{plural}{post}"
2436
+
2437
+ @typechecked
2438
+ def plural_adj(
2439
+ self, text: Word, count: Optional[Union[str, int, Any]] = None
2440
+ ) -> str:
2441
+ """
2442
+ Return the plural of text, where text is an adjective.
2443
+
2444
+ If count supplied, then return text if count is one of:
2445
+ 1, a, an, one, each, every, this, that
2446
+
2447
+ otherwise return the plural.
2448
+
2449
+ Whitespace at the start and end is preserved.
2450
+
2451
+ """
2452
+ pre, word, post = self.partition_word(text)
2453
+ if not word:
2454
+ return text
2455
+ plural = self.postprocess(word, self._pl_special_adjective(word, count) or word)
2456
+ return f"{pre}{plural}{post}"
2457
+
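# [Editor's sketch, not part of the package diff.] The four public plural
# methods above dispatch to the _pl_* helpers below; assumes the sample words
# are not in the user-defined or uninflected tables:
#
#   >>> p = engine()
#   >>> p.plural("cat")
#   'cats'
#   >>> p.plural("cat", count=1)
#   'cat'
#   >>> p.plural_verb("runs")
#   'run'
#   >>> p.plural_adj("a")
#   'some'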
2458
+ @typechecked
2459
+ def compare(self, word1: Word, word2: Word) -> Union[str, bool]:
2460
+ """
2461
+ compare word1 and word2 for equality regardless of plurality
2462
+
2463
+ return values:
2464
+ eq - the strings are equal
2465
+ p:s - word1 is the plural of word2
2466
+ s:p - word2 is the plural of word1
2467
+ p:p - word1 and word2 are two different plural forms of the one word
2468
+ False - otherwise
2469
+
2470
+ >>> compare = engine().compare
2471
+ >>> compare("egg", "eggs")
2472
+ 's:p'
2473
+ >>> compare('egg', 'egg')
2474
+ 'eq'
2475
+
2476
+ Words should not be empty.
2477
+
2478
+ >>> compare('egg', '')
2479
+ Traceback (most recent call last):
2480
+ ...
2481
+ typeguard.TypeCheckError:...is not an instance of inflect.Word
2482
+ """
2483
+ norms = self.plural_noun, self.plural_verb, self.plural_adj
2484
+ results = (self._plequal(word1, word2, norm) for norm in norms)
2485
+ return next(filter(None, results), False)
2486
+
2487
+ @typechecked
2488
+ def compare_nouns(self, word1: Word, word2: Word) -> Union[str, bool]:
2489
+ """
2490
+ compare word1 and word2 for equality regardless of plurality
2491
+ word1 and word2 are to be treated as nouns
2492
+
2493
+ return values:
2494
+ eq - the strings are equal
2495
+ p:s - word1 is the plural of word2
2496
+ s:p - word2 is the plural of word1
2497
+ p:p - word1 and word2 are two different plural forms of the one word
2498
+ False - otherwise
2499
+
2500
+ """
2501
+ return self._plequal(word1, word2, self.plural_noun)
2502
+
2503
+ @typechecked
2504
+ def compare_verbs(self, word1: Word, word2: Word) -> Union[str, bool]:
2505
+ """
2506
+ compare word1 and word2 for equality regardless of plurality
2507
+ word1 and word2 are to be treated as verbs
2508
+
2509
+ return values:
2510
+ eq - the strings are equal
2511
+ p:s - word1 is the plural of word2
2512
+ s:p - word2 is the plural of word1
2513
+ p:p - word1 and word2 are two different plural forms of the one word
2514
+ False - otherwise
2515
+
2516
+ """
2517
+ return self._plequal(word1, word2, self.plural_verb)
2518
+
2519
+ @typechecked
2520
+ def compare_adjs(self, word1: Word, word2: Word) -> Union[str, bool]:
2521
+ """
2522
+ compare word1 and word2 for equality regardless of plurality
2523
+ word1 and word2 are to be treated as adjectives
2524
+
2525
+ return values:
2526
+ eq - the strings are equal
2527
+ p:s - word1 is the plural of word2
2528
+ s:p - word2 is the plural of word1
2529
+ p:p - word1 and word2 are two different plural forms of the one word
2530
+ False - otherwise
2531
+
2532
+ """
2533
+ return self._plequal(word1, word2, self.plural_adj)
2534
+
2535
+ @typechecked
2536
+ def singular_noun(
2537
+ self,
2538
+ text: Word,
2539
+ count: Optional[Union[int, str, Any]] = None,
2540
+ gender: Optional[str] = None,
2541
+ ) -> Union[str, Literal[False]]:
2542
+ """
2543
+ Return the singular of text, where text is a plural noun.
2544
+
2545
+ If count supplied, then return the singular if count is one of:
2546
+ 1, a, an, one, each, every, this, that or if count is None
2547
+
2548
+ otherwise return text unchanged.
2549
+
2550
+ Whitespace at the start and end is preserved.
2551
+
2552
+ >>> p = engine()
2553
+ >>> p.singular_noun('horses')
2554
+ 'horse'
2555
+ >>> p.singular_noun('knights')
2556
+ 'knight'
2557
+
2558
+ Returns False when a singular noun is passed.
2559
+
2560
+ >>> p.singular_noun('horse')
2561
+ False
2562
+ >>> p.singular_noun('knight')
2563
+ False
2564
+ >>> p.singular_noun('soldier')
2565
+ False
2566
+
2567
+ """
2568
+ pre, word, post = self.partition_word(text)
2569
+ if not word:
2570
+ return text
2571
+ sing = self._sinoun(word, count=count, gender=gender)
2572
+ if sing is not False:
2573
+ plural = self.postprocess(word, sing)
2574
+ return f"{pre}{plural}{post}"
2575
+ return False
2576
+
2577
+ def _plequal(self, word1: str, word2: str, pl) -> Union[str, bool]: # noqa: C901
2578
+ classval = self.classical_dict.copy()
2579
+ self.classical_dict = all_classical.copy()
2580
+ if word1 == word2:
2581
+ return "eq"
2582
+ if word1 == pl(word2):
2583
+ return "p:s"
2584
+ if pl(word1) == word2:
2585
+ return "s:p"
2586
+ self.classical_dict = no_classical.copy()
2587
+ if word1 == pl(word2):
2588
+ return "p:s"
2589
+ if pl(word1) == word2:
2590
+ return "s:p"
2591
+ self.classical_dict = classval.copy()
2592
+
2593
+ if pl == self.plural or pl == self.plural_noun:
2594
+ if self._pl_check_plurals_N(word1, word2):
2595
+ return "p:p"
2596
+ if self._pl_check_plurals_N(word2, word1):
2597
+ return "p:p"
2598
+ if pl == self.plural or pl == self.plural_adj:
2599
+ if self._pl_check_plurals_adj(word1, word2):
2600
+ return "p:p"
2601
+ return False
2602
+
2603
+ def _pl_reg_plurals(self, pair: str, stems: str, end1: str, end2: str) -> bool:
2604
+ pattern = rf"({stems})({end1}\|\1{end2}|{end2}\|\1{end1})"
2605
+ return bool(re.search(pattern, pair))
2606
+
2607
+ def _pl_check_plurals_N(self, word1: str, word2: str) -> bool:
2608
+ stem_endings = (
2609
+ (pl_sb_C_a_ata, "as", "ata"),
2610
+ (pl_sb_C_is_ides, "is", "ides"),
2611
+ (pl_sb_C_a_ae, "s", "e"),
2612
+ (pl_sb_C_en_ina, "ens", "ina"),
2613
+ (pl_sb_C_um_a, "ums", "a"),
2614
+ (pl_sb_C_us_i, "uses", "i"),
2615
+ (pl_sb_C_on_a, "ons", "a"),
2616
+ (pl_sb_C_o_i_stems, "os", "i"),
2617
+ (pl_sb_C_ex_ices, "exes", "ices"),
2618
+ (pl_sb_C_ix_ices, "ixes", "ices"),
2619
+ (pl_sb_C_i, "s", "i"),
2620
+ (pl_sb_C_im, "s", "im"),
2621
+ (".*eau", "s", "x"),
2622
+ (".*ieu", "s", "x"),
2623
+ (".*tri", "xes", "ces"),
2624
+ (".{2,}[yia]n", "xes", "ges"),
2625
+ )
2626
+
2627
+ words = map(Words, (word1, word2))
2628
+ pair = "|".join(word.last for word in words)
2629
+
2630
+ return (
2631
+ pair in pl_sb_irregular_s.values()
2632
+ or pair in pl_sb_irregular.values()
2633
+ or pair in pl_sb_irregular_caps.values()
2634
+ or any(
2635
+ self._pl_reg_plurals(pair, stems, end1, end2)
2636
+ for stems, end1, end2 in stem_endings
2637
+ )
2638
+ )
2639
+
2640
+ def _pl_check_plurals_adj(self, word1: str, word2: str) -> bool:
2641
+ word1a = word1[: word1.rfind("'")] if word1.endswith(("'s", "'")) else ""
2642
+ word2a = word2[: word2.rfind("'")] if word2.endswith(("'s", "'")) else ""
2643
+
2644
+ return (
2645
+ bool(word1a)
2646
+ and bool(word2a)
2647
+ and (
2648
+ self._pl_check_plurals_N(word1a, word2a)
2649
+ or self._pl_check_plurals_N(word2a, word1a)
2650
+ )
2651
+ )
2652
+
2653
+ def get_count(self, count: Optional[Union[str, int]] = None) -> Union[str, int]:
2654
+ if count is None and self.persistent_count is not None:
2655
+ count = self.persistent_count
2656
+
2657
+ if count is not None:
2658
+ count = (
2659
+ 1
2660
+ if (
2661
+ (str(count) in pl_count_one)
2662
+ or (
2663
+ self.classical_dict["zero"]
2664
+ and str(count).lower() in pl_count_zero
2665
+ )
2666
+ )
2667
+ else 2
2668
+ )
2669
+ else:
2670
+ count = ""
2671
+ return count
2672
+
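# [Editor's sketch, not part of the package diff.] get_count() normalizes any
# supplied count to 1 (singular), 2 (plural) or '' (unspecified):
#
#   >>> p = engine()
#   >>> p.get_count("each")
#   1
#   >>> p.get_count(5)
#   2
#   >>> p.classical(zero=True)
#   >>> p.get_count("No")           # zero counts as singular in classical mode
#   1
#   >>> p.get_count()
#   ''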
2673
+ # @profile
2674
+ def _plnoun( # noqa: C901
2675
+ self, word: str, count: Optional[Union[str, int]] = None
2676
+ ) -> str:
2677
+ count = self.get_count(count)
2678
+
2679
+ # DEFAULT TO PLURAL
2680
+
2681
+ if count == 1:
2682
+ return word
2683
+
2684
+ # HANDLE USER-DEFINED NOUNS
2685
+
2686
+ value = self.ud_match(word, self.pl_sb_user_defined)
2687
+ if value is not None:
2688
+ return value
2689
+
2690
+ # HANDLE EMPTY WORD, SINGULAR COUNT AND UNINFLECTED PLURALS
2691
+
2692
+ if word == "":
2693
+ return word
2694
+
2695
+ word = Words(word)
2696
+
2697
+ if word.last.lower() in pl_sb_uninflected_complete:
2698
+ if len(word.split_) >= 3:
2699
+ return self._handle_long_compounds(word, count=2) or word
2700
+ return word
2701
+
2702
+ if word in pl_sb_uninflected_caps:
2703
+ return word
2704
+
2705
+ for k, v in pl_sb_uninflected_bysize.items():
2706
+ if word.lowered[-k:] in v:
2707
+ return word
2708
+
2709
+ if self.classical_dict["herd"] and word.last.lower() in pl_sb_uninflected_herd:
2710
+ return word
2711
+
2712
+ # HANDLE COMPOUNDS ("Governor General", "mother-in-law", "aide-de-camp", ETC.)
2713
+
2714
+ mo = PL_SB_POSTFIX_ADJ_STEMS_RE.search(word)
2715
+ if mo and mo.group(2) != "":
2716
+ return f"{self._plnoun(mo.group(1), 2)}{mo.group(2)}"
2717
+
2718
+ if " a " in word.lowered or "-a-" in word.lowered:
2719
+ mo = PL_SB_PREP_DUAL_COMPOUND_RE.search(word)
2720
+ if mo and mo.group(2) != "" and mo.group(3) != "":
2721
+ return (
2722
+ f"{self._plnoun(mo.group(1), 2)}"
2723
+ f"{mo.group(2)}"
2724
+ f"{self._plnoun(mo.group(3))}"
2725
+ )
2726
+
2727
+ if len(word.split_) >= 3:
2728
+ handled_words = self._handle_long_compounds(word, count=2)
2729
+ if handled_words is not None:
2730
+ return handled_words
2731
+
2732
+ # only pluralize denominators in units
2733
+ mo = DENOMINATOR.search(word.lowered)
2734
+ if mo:
2735
+ index = len(mo.group("denominator"))
2736
+ return f"{self._plnoun(word[:index])}{word[index:]}"
2737
+
2738
+ # handle units given in degrees (only accept if
2739
+ # there is no more than one word following)
2740
+ # degree Celsius => degrees Celsius but degree
2741
+ # fahrenheit hour => degree fahrenheit hours
2742
+ if len(word.split_) >= 2 and word.split_[-2] == "degree":
2743
+ return " ".join([self._plnoun(word.first)] + word.split_[1:])
2744
+
2745
+ with contextlib.suppress(ValueError):
2746
+ return self._handle_prepositional_phrase(
2747
+ word.lowered,
2748
+ functools.partial(self._plnoun, count=2),
2749
+ '-',
2750
+ )
2751
+
2752
+ # HANDLE PRONOUNS
2753
+
2754
+ for k, v in pl_pron_acc_keys_bysize.items():
2755
+ if word.lowered[-k:] in v: # ends with accusative pronoun
2756
+ for pk, pv in pl_prep_bysize.items():
2757
+ if word.lowered[:pk] in pv: # starts with a prep
2758
+ if word.lowered.split() == [
2759
+ word.lowered[:pk],
2760
+ word.lowered[-k:],
2761
+ ]:
2762
+ # only whitespace in between
2763
+ return word.lowered[:-k] + pl_pron_acc[word.lowered[-k:]]
2764
+
2765
+ try:
2766
+ return pl_pron_nom[word.lowered]
2767
+ except KeyError:
2768
+ pass
2769
+
2770
+ try:
2771
+ return pl_pron_acc[word.lowered]
2772
+ except KeyError:
2773
+ pass
2774
+
2775
+ # HANDLE ISOLATED IRREGULAR PLURALS
2776
+
2777
+ if word.last in pl_sb_irregular_caps:
2778
+ llen = len(word.last)
2779
+ return f"{word[:-llen]}{pl_sb_irregular_caps[word.last]}"
2780
+
2781
+ lowered_last = word.last.lower()
2782
+ if lowered_last in pl_sb_irregular:
2783
+ llen = len(lowered_last)
2784
+ return f"{word[:-llen]}{pl_sb_irregular[lowered_last]}"
2785
+
2786
+ dash_split = word.lowered.split('-')
2787
+ if (" ".join(dash_split[-2:])).lower() in pl_sb_irregular_compound:
2788
+ llen = len(
2789
+ " ".join(dash_split[-2:])
2790
+ ) # TODO: what if 2 spaces between these words?
2791
+ return (
2792
+ f"{word[:-llen]}"
2793
+ f"{pl_sb_irregular_compound[(' '.join(dash_split[-2:])).lower()]}"
2794
+ )
2795
+
2796
+ if word.lowered[-3:] == "quy":
2797
+ return f"{word[:-1]}ies"
2798
+
2799
+ if word.lowered[-6:] == "person":
2800
+ if self.classical_dict["persons"]:
2801
+ return f"{word}s"
2802
+ else:
2803
+ return f"{word[:-4]}ople"
2804
+
2805
+ # HANDLE FAMILIES OF IRREGULAR PLURALS
2806
+
2807
+ if word.lowered[-3:] == "man":
2808
+ for k, v in pl_sb_U_man_mans_bysize.items():
2809
+ if word.lowered[-k:] in v:
2810
+ return f"{word}s"
2811
+ for k, v in pl_sb_U_man_mans_caps_bysize.items():
2812
+ if word[-k:] in v:
2813
+ return f"{word}s"
2814
+ return f"{word[:-3]}men"
2815
+ if word.lowered[-5:] == "mouse":
2816
+ return f"{word[:-5]}mice"
2817
+ if word.lowered[-5:] == "louse":
2818
+ v = pl_sb_U_louse_lice_bysize.get(len(word))
2819
+ if v and word.lowered in v:
2820
+ return f"{word[:-5]}lice"
2821
+ return f"{word}s"
2822
+ if word.lowered[-5:] == "goose":
2823
+ return f"{word[:-5]}geese"
2824
+ if word.lowered[-5:] == "tooth":
2825
+ return f"{word[:-5]}teeth"
2826
+ if word.lowered[-4:] == "foot":
2827
+ return f"{word[:-4]}feet"
2828
+ if word.lowered[-4:] == "taco":
2829
+ return f"{word[:-5]}tacos"
2830
+
2831
+ if word.lowered == "die":
2832
+ return "dice"
2833
+
2834
+ # HANDLE UNASSIMILATED IMPORTS
2835
+
2836
+ if word.lowered[-4:] == "ceps":
2837
+ return word
2838
+ if word.lowered[-4:] == "zoon":
2839
+ return f"{word[:-2]}a"
2840
+ if word.lowered[-3:] in ("cis", "sis", "xis"):
2841
+ return f"{word[:-2]}es"
2842
+
2843
+ for lastlet, d, numend, post in (
2844
+ ("h", pl_sb_U_ch_chs_bysize, None, "s"),
2845
+ ("x", pl_sb_U_ex_ices_bysize, -2, "ices"),
2846
+ ("x", pl_sb_U_ix_ices_bysize, -2, "ices"),
2847
+ ("m", pl_sb_U_um_a_bysize, -2, "a"),
2848
+ ("s", pl_sb_U_us_i_bysize, -2, "i"),
2849
+ ("n", pl_sb_U_on_a_bysize, -2, "a"),
2850
+ ("a", pl_sb_U_a_ae_bysize, None, "e"),
2851
+ ):
2852
+ if word.lowered[-1] == lastlet: # this test to add speed
2853
+ for k, v in d.items():
2854
+ if word.lowered[-k:] in v:
2855
+ return word[:numend] + post
2856
+
2857
+ # HANDLE INCOMPLETELY ASSIMILATED IMPORTS
2858
+
2859
+ if self.classical_dict["ancient"]:
2860
+ if word.lowered[-4:] == "trix":
2861
+ return f"{word[:-1]}ces"
2862
+ if word.lowered[-3:] in ("eau", "ieu"):
2863
+ return f"{word}x"
2864
+ if word.lowered[-3:] in ("ynx", "inx", "anx") and len(word) > 4:
2865
+ return f"{word[:-1]}ges"
2866
+
2867
+ for lastlet, d, numend, post in (
2868
+ ("n", pl_sb_C_en_ina_bysize, -2, "ina"),
2869
+ ("x", pl_sb_C_ex_ices_bysize, -2, "ices"),
2870
+ ("x", pl_sb_C_ix_ices_bysize, -2, "ices"),
2871
+ ("m", pl_sb_C_um_a_bysize, -2, "a"),
2872
+ ("s", pl_sb_C_us_i_bysize, -2, "i"),
2873
+ ("s", pl_sb_C_us_us_bysize, None, ""),
2874
+ ("a", pl_sb_C_a_ae_bysize, None, "e"),
2875
+ ("a", pl_sb_C_a_ata_bysize, None, "ta"),
2876
+ ("s", pl_sb_C_is_ides_bysize, -1, "des"),
2877
+ ("o", pl_sb_C_o_i_bysize, -1, "i"),
2878
+ ("n", pl_sb_C_on_a_bysize, -2, "a"),
2879
+ ):
2880
+ if word.lowered[-1] == lastlet: # this test to add speed
2881
+ for k, v in d.items():
2882
+ if word.lowered[-k:] in v:
2883
+ return word[:numend] + post
2884
+
2885
+ for d, numend, post in (
2886
+ (pl_sb_C_i_bysize, None, "i"),
2887
+ (pl_sb_C_im_bysize, None, "im"),
2888
+ ):
2889
+ for k, v in d.items():
2890
+ if word.lowered[-k:] in v:
2891
+ return word[:numend] + post
2892
+
2893
+ # HANDLE SINGULAR NOUNS ENDING IN ...s OR OTHER SIBILANTS
2894
+
2895
+ if lowered_last in pl_sb_singular_s_complete:
2896
+ return f"{word}es"
2897
+
2898
+ for k, v in pl_sb_singular_s_bysize.items():
2899
+ if word.lowered[-k:] in v:
2900
+ return f"{word}es"
2901
+
2902
+ if word.lowered[-2:] == "es" and word[0] == word[0].upper():
2903
+ return f"{word}es"
2904
+
2905
+ if word.lowered[-1] == "z":
2906
+ for k, v in pl_sb_z_zes_bysize.items():
2907
+ if word.lowered[-k:] in v:
2908
+ return f"{word}es"
2909
+
2910
+ if word.lowered[-2:-1] != "z":
2911
+ return f"{word}zes"
2912
+
2913
+ if word.lowered[-2:] == "ze":
2914
+ for k, v in pl_sb_ze_zes_bysize.items():
2915
+ if word.lowered[-k:] in v:
2916
+ return f"{word}s"
2917
+
2918
+ if word.lowered[-2:] in ("ch", "sh", "zz", "ss") or word.lowered[-1] == "x":
2919
+ return f"{word}es"
2920
+
2921
+ # HANDLE ...f -> ...ves
2922
+
2923
+ if word.lowered[-3:] in ("elf", "alf", "olf"):
2924
+ return f"{word[:-1]}ves"
2925
+ if word.lowered[-3:] == "eaf" and word.lowered[-4:-3] != "d":
2926
+ return f"{word[:-1]}ves"
2927
+ if word.lowered[-4:] in ("nife", "life", "wife"):
2928
+ return f"{word[:-2]}ves"
2929
+ if word.lowered[-3:] == "arf":
2930
+ return f"{word[:-1]}ves"
2931
+
2932
+ # HANDLE ...y
2933
+
2934
+ if word.lowered[-1] == "y":
2935
+ if word.lowered[-2:-1] in "aeiou" or len(word) == 1:
2936
+ return f"{word}s"
2937
+
2938
+ if self.classical_dict["names"]:
2939
+ if word.lowered[-1] == "y" and word[0] == word[0].upper():
2940
+ return f"{word}s"
2941
+
2942
+ return f"{word[:-1]}ies"
2943
+
2944
+ # HANDLE ...o
2945
+
2946
+ if lowered_last in pl_sb_U_o_os_complete:
2947
+ return f"{word}s"
2948
+
2949
+ for k, v in pl_sb_U_o_os_bysize.items():
2950
+ if word.lowered[-k:] in v:
2951
+ return f"{word}s"
2952
+
2953
+ if word.lowered[-2:] in ("ao", "eo", "io", "oo", "uo"):
2954
+ return f"{word}s"
2955
+
2956
+ if word.lowered[-1] == "o":
2957
+ return f"{word}es"
2958
+
2959
+ # OTHERWISE JUST ADD ...s
2960
+
2961
+ return f"{word}s"
2962
+
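# [Editor's sketch, not part of the package diff.] A few of the _plnoun()
# branches above exercised through plural_noun(); assumes these words are not
# overridden by the irregular or uninflected tables defined earlier:
#
#   >>> p = engine()
#   >>> p.plural_noun("wolf")        # ...olf -> ...olves
#   'wolves'
#   >>> p.plural_noun("knife")       # ...nife -> ...nives
#   'knives'
#   >>> p.plural_noun("soliloquy")   # ...quy -> ...quies
#   'soliloquies'
#   >>> p.plural_noun("church")      # sibilant ending takes -es
#   'churches'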
2963
+ @classmethod
2964
+ def _handle_prepositional_phrase(cls, phrase, transform, sep):
2965
+ """
2966
+ Given a word or phrase possibly separated by sep, parse out
2967
+ the prepositional phrase and apply the transform to the word
2968
+ preceding the prepositional phrase.
2969
+
2970
+ Raise ValueError if the pivot is not found or if at least two
2971
+ separators are not found.
2972
+
2973
+ >>> engine._handle_prepositional_phrase("man-of-war", str.upper, '-')
2974
+ 'MAN-of-war'
2975
+ >>> engine._handle_prepositional_phrase("man of war", str.upper, ' ')
2976
+ 'MAN of war'
2977
+ """
2978
+ parts = phrase.split(sep)
2979
+ if len(parts) < 3:
2980
+ raise ValueError("Cannot handle words with fewer than two separators")
2981
+
2982
+ pivot = cls._find_pivot(parts, pl_prep_list_da)
2983
+
2984
+ transformed = transform(parts[pivot - 1]) or parts[pivot - 1]
2985
+ return " ".join(
2986
+ parts[: pivot - 1] + [sep.join([transformed, parts[pivot], ''])]
2987
+ ) + " ".join(parts[(pivot + 1) :])
2988
+
2989
+ def _handle_long_compounds(self, word: Words, count: int) -> Union[str, None]:
2990
+ """
2991
+ Handles the plural and singular for compound `Words` that
2992
+ have three or more words, based on the given count.
2993
+
2994
+ >>> engine()._handle_long_compounds(Words("pair of scissors"), 2)
2995
+ 'pairs of scissors'
2996
+ >>> engine()._handle_long_compounds(Words("men beyond hills"), 1)
2997
+ 'man beyond hills'
2998
+ """
2999
+ inflection = self._sinoun if count == 1 else self._plnoun
3000
+ solutions = ( # type: ignore
3001
+ " ".join(
3002
+ itertools.chain(
3003
+ leader,
3004
+ [inflection(cand, count), prep], # type: ignore
3005
+ trailer,
3006
+ )
3007
+ )
3008
+ for leader, (cand, prep), trailer in windowed_complete(word.split_, 2)
3009
+ if prep in pl_prep_list_da # type: ignore
3010
+ )
3011
+ return next(solutions, None)
3012
+
3013
+ @staticmethod
3014
+ def _find_pivot(words, candidates):
3015
+ pivots = (
3016
+ index for index in range(1, len(words) - 1) if words[index] in candidates
3017
+ )
3018
+ try:
3019
+ return next(pivots)
3020
+ except StopIteration:
3021
+ raise ValueError("No pivot found") from None
3022
+
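# [Editor's sketch, not part of the package diff.] _find_pivot() locates the
# first interior preposition, or raises if none exists:
#
#   >>> engine._find_pivot(["man", "of", "war"], pl_prep_list_da)
#   1
#   >>> engine._find_pivot(["red", "herring"], pl_prep_list_da)
#   Traceback (most recent call last):
#   ...
#   ValueError: No pivot found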
3023
+ def _pl_special_verb( # noqa: C901
3024
+ self, word: str, count: Optional[Union[str, int]] = None
3025
+ ) -> Union[str, bool]:
3026
+ if self.classical_dict["zero"] and str(count).lower() in pl_count_zero:
3027
+ return False
3028
+ count = self.get_count(count)
3029
+
3030
+ if count == 1:
3031
+ return word
3032
+
3033
+ # HANDLE USER-DEFINED VERBS
3034
+
3035
+ value = self.ud_match(word, self.pl_v_user_defined)
3036
+ if value is not None:
3037
+ return value
3038
+
3039
+ # HANDLE IRREGULAR PRESENT TENSE (SIMPLE AND COMPOUND)
3040
+
3041
+ try:
3042
+ words = Words(word)
3043
+ except IndexError:
3044
+ return False # word is ''
3045
+
3046
+ if words.first in plverb_irregular_pres:
3047
+ return f"{plverb_irregular_pres[words.first]}{words[len(words.first) :]}"
3048
+
3049
+ # HANDLE IRREGULAR FUTURE, PRETERITE AND PERFECT TENSES
3050
+
3051
+ if words.first in plverb_irregular_non_pres:
3052
+ return word
3053
+
3054
+ # HANDLE PRESENT NEGATIONS (SIMPLE AND COMPOUND)
3055
+
3056
+ if words.first.endswith("n't") and words.first[:-3] in plverb_irregular_pres:
3057
+ return (
3058
+ f"{plverb_irregular_pres[words.first[:-3]]}n't"
3059
+ f"{words[len(words.first) :]}"
3060
+ )
3061
+
3062
+ if words.first.endswith("n't"):
3063
+ return word
3064
+
3065
+ # HANDLE SPECIAL CASES
3066
+
3067
+ mo = PLVERB_SPECIAL_S_RE.search(word)
3068
+ if mo:
3069
+ return False
3070
+ if WHITESPACE.search(word):
3071
+ return False
3072
+
3073
+ if words.lowered == "quizzes":
3074
+ return "quiz"
3075
+
3076
+ # HANDLE STANDARD 3RD PERSON (CHOP THE ...(e)s OFF SINGLE WORDS)
3077
+
3078
+ if (
3079
+ words.lowered[-4:] in ("ches", "shes", "zzes", "sses")
3080
+ or words.lowered[-3:] == "xes"
3081
+ ):
3082
+ return words[:-2]
3083
+
3084
+ if words.lowered[-3:] == "ies" and len(words) > 3:
3085
+ return words.lowered[:-3] + "y"
3086
+
3087
+ if (
3088
+ words.last.lower() in pl_v_oes_oe
3089
+ or words.lowered[-4:] in pl_v_oes_oe_endings_size4
3090
+ or words.lowered[-5:] in pl_v_oes_oe_endings_size5
3091
+ ):
3092
+ return words[:-1]
3093
+
3094
+ if words.lowered.endswith("oes") and len(words) > 3:
3095
+ return words.lowered[:-2]
3096
+
3097
+ mo = ENDS_WITH_S.search(words)
3098
+ if mo:
3099
+ return mo.group(1)
3100
+
3101
+ # OTHERWISE, A REGULAR VERB (HANDLE ELSEWHERE)
3102
+
3103
+ return False
3104
+
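# [Editor's sketch, not part of the package diff.] _pl_special_verb() handles
# irregular, negated and sibilant verb forms before _pl_general_verb() is
# tried; assumes "flies" is not in the uninflected tables:
#
#   >>> p = engine()
#   >>> p.plural_verb("was")
#   'were'
#   >>> p.plural_verb("isn't")
#   "aren't"
#   >>> p.plural_verb("flies")
#   'fly'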
3105
+ def _pl_general_verb(
3106
+ self, word: str, count: Optional[Union[str, int]] = None
3107
+ ) -> str:
3108
+ count = self.get_count(count)
3109
+
3110
+ if count == 1:
3111
+ return word
3112
+
3113
+ # HANDLE AMBIGUOUS PRESENT TENSES (SIMPLE AND COMPOUND)
3114
+
3115
+ mo = plverb_ambiguous_pres_keys.search(word)
3116
+ if mo:
3117
+ return f"{plverb_ambiguous_pres[mo.group(1).lower()]}{mo.group(2)}"
3118
+
3119
+ # HANDLE AMBIGUOUS PRETERITE AND PERFECT TENSES
3120
+
3121
+ mo = plverb_ambiguous_non_pres.search(word)
3122
+ if mo:
3123
+ return word
3124
+
3125
+ # OTHERWISE, 1st OR 2ND PERSON IS UNINFLECTED
3126
+
3127
+ return word
3128
+
3129
+ def _pl_special_adjective(
3130
+ self, word: str, count: Optional[Union[str, int]] = None
3131
+ ) -> Union[str, bool]:
3132
+ count = self.get_count(count)
3133
+
3134
+ if count == 1:
3135
+ return word
3136
+
3137
+ # HANDLE USER-DEFINED ADJECTIVES
3138
+
3139
+ value = self.ud_match(word, self.pl_adj_user_defined)
3140
+ if value is not None:
3141
+ return value
3142
+
3143
+ # HANDLE KNOWN CASES
3144
+
3145
+ mo = pl_adj_special_keys.search(word)
3146
+ if mo:
3147
+ return pl_adj_special[mo.group(1).lower()]
3148
+
3149
+ # HANDLE POSSESSIVES
3150
+
3151
+ mo = pl_adj_poss_keys.search(word)
3152
+ if mo:
3153
+ return pl_adj_poss[mo.group(1).lower()]
3154
+
3155
+ mo = ENDS_WITH_APOSTROPHE_S.search(word)
3156
+ if mo:
3157
+ pl = self.plural_noun(mo.group(1))
3158
+ trailing_s = "" if pl[-1] == "s" else "s"
3159
+ return f"{pl}'{trailing_s}"
3160
+
3161
+ # OTHERWISE, NO IDEA
3162
+
3163
+ return False
3164
+
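# [Editor's sketch, not part of the package diff.] _pl_special_adjective()
# covers articles, possessive pronouns and possessive nouns:
#
#   >>> p = engine()
#   >>> p.plural_adj("her")
#   'their'
#   >>> p.plural_adj("dog's")
#   "dogs'"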
3165
+ # @profile
3166
+ def _sinoun( # noqa: C901
3167
+ self,
3168
+ word: str,
3169
+ count: Optional[Union[str, int]] = None,
3170
+ gender: Optional[str] = None,
3171
+ ) -> Union[str, bool]:
3172
+ count = self.get_count(count)
3173
+
3174
+ # DEFAULT TO PLURAL
3175
+
3176
+ if count == 2:
3177
+ return word
3178
+
3179
+ # SET THE GENDER
3180
+
3181
+ try:
3182
+ if gender is None:
3183
+ gender = self.thegender
3184
+ elif gender not in singular_pronoun_genders:
3185
+ raise BadGenderError
3186
+ except (TypeError, IndexError) as err:
3187
+ raise BadGenderError from err
3188
+
3189
+ # HANDLE USER-DEFINED NOUNS
3190
+
3191
+ value = self.ud_match(word, self.si_sb_user_defined)
3192
+ if value is not None:
3193
+ return value
3194
+
3195
+ # HANDLE EMPTY WORD, SINGULAR COUNT AND UNINFLECTED PLURALS
3196
+
3197
+ if word == "":
3198
+ return word
3199
+
3200
+ if word in si_sb_ois_oi_case:
3201
+ return word[:-1]
3202
+
3203
+ words = Words(word)
3204
+
3205
+ if words.last.lower() in pl_sb_uninflected_complete:
3206
+ if len(words.split_) >= 3:
3207
+ return self._handle_long_compounds(words, count=1) or word
3208
+ return word
3209
+
3210
+ if word in pl_sb_uninflected_caps:
3211
+ return word
3212
+
3213
+ for k, v in pl_sb_uninflected_bysize.items():
3214
+ if words.lowered[-k:] in v:
3215
+ return word
3216
+
3217
+ if self.classical_dict["herd"] and words.last.lower() in pl_sb_uninflected_herd:
3218
+ return word
3219
+
3220
+ if words.last.lower() in pl_sb_C_us_us:
3221
+ return word if self.classical_dict["ancient"] else False
3222
+
3223
+ # HANDLE COMPOUNDS ("Governor General", "mother-in-law", "aide-de-camp", ETC.)
3224
+
3225
+ mo = PL_SB_POSTFIX_ADJ_STEMS_RE.search(word)
3226
+ if mo and mo.group(2) != "":
3227
+ return f"{self._sinoun(mo.group(1), 1, gender=gender)}{mo.group(2)}"
3228
+
3229
+ with contextlib.suppress(ValueError):
3230
+ return self._handle_prepositional_phrase(
3231
+ words.lowered,
3232
+ functools.partial(self._sinoun, count=1, gender=gender),
3233
+ ' ',
3234
+ )
3235
+
3236
+ with contextlib.suppress(ValueError):
3237
+ return self._handle_prepositional_phrase(
3238
+ words.lowered,
3239
+ functools.partial(self._sinoun, count=1, gender=gender),
3240
+ '-',
3241
+ )
3242
+
3243
+ # HANDLE PRONOUNS
3244
+
3245
+ for k, v in si_pron_acc_keys_bysize.items():
3246
+ if words.lowered[-k:] in v: # ends with accusative pronoun
3247
+ for pk, pv in pl_prep_bysize.items():
3248
+ if words.lowered[:pk] in pv: # starts with a prep
3249
+ if words.lowered.split() == [
3250
+ words.lowered[:pk],
3251
+ words.lowered[-k:],
3252
+ ]:
3253
+ # only whitespace in between
3254
+ return words.lowered[:-k] + get_si_pron(
3255
+ "acc", words.lowered[-k:], gender
3256
+ )
3257
+
3258
+ try:
3259
+ return get_si_pron("nom", words.lowered, gender)
3260
+ except KeyError:
3261
+ pass
3262
+
3263
+ try:
3264
+ return get_si_pron("acc", words.lowered, gender)
3265
+ except KeyError:
3266
+ pass
3267
+
3268
+ # HANDLE ISOLATED IRREGULAR PLURALS
3269
+
3270
+ if words.last in si_sb_irregular_caps:
3271
+ llen = len(words.last)
3272
+ return f"{word[:-llen]}{si_sb_irregular_caps[words.last]}"
3273
+
3274
+ if words.last.lower() in si_sb_irregular:
3275
+ llen = len(words.last.lower())
3276
+ return f"{word[:-llen]}{si_sb_irregular[words.last.lower()]}"
3277
+
3278
+ dash_split = words.lowered.split("-")
3279
+ if (" ".join(dash_split[-2:])).lower() in si_sb_irregular_compound:
3280
+ llen = len(
3281
+ " ".join(dash_split[-2:])
3282
+ ) # TODO: what if 2 spaces between these words?
3283
+ return "{}{}".format(
3284
+ word[:-llen],
3285
+ si_sb_irregular_compound[(" ".join(dash_split[-2:])).lower()],
3286
+ )
3287
+
3288
+ if words.lowered[-5:] == "quies":
3289
+ return word[:-3] + "y"
3290
+
3291
+ if words.lowered[-7:] == "persons":
3292
+ return word[:-1]
3293
+ if words.lowered[-6:] == "people":
3294
+ return word[:-4] + "rson"
3295
+
3296
+ # HANDLE FAMILIES OF IRREGULAR PLURALS
3297
+
3298
+ if words.lowered[-4:] == "mans":
3299
+ for k, v in si_sb_U_man_mans_bysize.items():
3300
+ if words.lowered[-k:] in v:
3301
+ return word[:-1]
3302
+ for k, v in si_sb_U_man_mans_caps_bysize.items():
3303
+ if word[-k:] in v:
3304
+ return word[:-1]
3305
+ if words.lowered[-3:] == "men":
3306
+ return word[:-3] + "man"
3307
+ if words.lowered[-4:] == "mice":
3308
+ return word[:-4] + "mouse"
3309
+ if words.lowered[-4:] == "lice":
3310
+ v = si_sb_U_louse_lice_bysize.get(len(word))
3311
+ if v and words.lowered in v:
3312
+ return word[:-4] + "louse"
3313
+ if words.lowered[-5:] == "geese":
3314
+ return word[:-5] + "goose"
3315
+ if words.lowered[-5:] == "teeth":
3316
+ return word[:-5] + "tooth"
3317
+ if words.lowered[-4:] == "feet":
3318
+ return word[:-4] + "foot"
3319
+
3320
+ if words.lowered == "dice":
3321
+ return "die"
3322
+
3323
+ # HANDLE UNASSIMILATED IMPORTS
3324
+
3325
+ if words.lowered[-4:] == "ceps":
3326
+ return word
3327
+ if words.lowered[-3:] == "zoa":
3328
+ return word[:-1] + "on"
3329
+
3330
+ for lastlet, d, unass_numend, post in (
3331
+ ("s", si_sb_U_ch_chs_bysize, -1, ""),
3332
+ ("s", si_sb_U_ex_ices_bysize, -4, "ex"),
3333
+ ("s", si_sb_U_ix_ices_bysize, -4, "ix"),
3334
+ ("a", si_sb_U_um_a_bysize, -1, "um"),
3335
+ ("i", si_sb_U_us_i_bysize, -1, "us"),
3336
+ ("a", si_sb_U_on_a_bysize, -1, "on"),
3337
+ ("e", si_sb_U_a_ae_bysize, -1, ""),
3338
+ ):
3339
+ if words.lowered[-1] == lastlet: # this test to add speed
3340
+ for k, v in d.items():
3341
+ if words.lowered[-k:] in v:
3342
+ return word[:unass_numend] + post
3343
+
3344
+ # HANDLE INCOMPLETELY ASSIMILATED IMPORTS
3345
+
3346
+ if self.classical_dict["ancient"]:
3347
+ if words.lowered[-6:] == "trices":
3348
+ return word[:-3] + "x"
3349
+ if words.lowered[-4:] in ("eaux", "ieux"):
3350
+ return word[:-1]
3351
+ if words.lowered[-5:] in ("ynges", "inges", "anges") and len(word) > 6:
3352
+ return word[:-3] + "x"
3353
+
3354
+ for lastlet, d, class_numend, post in (
3355
+ ("a", si_sb_C_en_ina_bysize, -3, "en"),
3356
+ ("s", si_sb_C_ex_ices_bysize, -4, "ex"),
3357
+ ("s", si_sb_C_ix_ices_bysize, -4, "ix"),
3358
+ ("a", si_sb_C_um_a_bysize, -1, "um"),
3359
+ ("i", si_sb_C_us_i_bysize, -1, "us"),
3360
+ ("s", pl_sb_C_us_us_bysize, None, ""),
3361
+ ("e", si_sb_C_a_ae_bysize, -1, ""),
3362
+ ("a", si_sb_C_a_ata_bysize, -2, ""),
3363
+ ("s", si_sb_C_is_ides_bysize, -3, "s"),
3364
+ ("i", si_sb_C_o_i_bysize, -1, "o"),
3365
+ ("a", si_sb_C_on_a_bysize, -1, "on"),
3366
+ ("m", si_sb_C_im_bysize, -2, ""),
3367
+ ("i", si_sb_C_i_bysize, -1, ""),
3368
+ ):
3369
+ if words.lowered[-1] == lastlet: # this test to add speed
3370
+ for k, v in d.items():
3371
+ if words.lowered[-k:] in v:
3372
+ return word[:class_numend] + post
3373
+
3374
+ # HANDLE PLURALS ENDING IN uses -> use
3375
+
3376
+ if (
3377
+ words.lowered[-6:] == "houses"
3378
+ or word in si_sb_uses_use_case
3379
+ or words.last.lower() in si_sb_uses_use
3380
+ ):
3381
+ return word[:-1]
3382
+
3383
+ # HANDLE PLURALS ENDING IN ies -> ie
3384
+
3385
+ if word in si_sb_ies_ie_case or words.last.lower() in si_sb_ies_ie:
3386
+ return word[:-1]
3387
+
3388
+ # HANDLE PLURALS ENDING IN oes -> oe
3389
+
3390
+ if (
3391
+ words.lowered[-5:] == "shoes"
3392
+ or word in si_sb_oes_oe_case
3393
+ or words.last.lower() in si_sb_oes_oe
3394
+ ):
3395
+ return word[:-1]
3396
+
3397
+ # HANDLE SINGULAR NOUNS ENDING IN ...s OR OTHER SIBILANTS
3398
+
3399
+ if word in si_sb_sses_sse_case or words.last.lower() in si_sb_sses_sse:
3400
+ return word[:-1]
3401
+
3402
+ if words.last.lower() in si_sb_singular_s_complete:
3403
+ return word[:-2]
3404
+
3405
+ for k, v in si_sb_singular_s_bysize.items():
3406
+ if words.lowered[-k:] in v:
3407
+ return word[:-2]
3408
+
3409
+ if words.lowered[-4:] == "eses" and word[0] == word[0].upper():
3410
+ return word[:-2]
3411
+
3412
+ if words.last.lower() in si_sb_z_zes:
3413
+ return word[:-2]
3414
+
3415
+ if words.last.lower() in si_sb_zzes_zz:
3416
+ return word[:-2]
3417
+
3418
+ if words.lowered[-4:] == "zzes":
3419
+ return word[:-3]
3420
+
3421
+ if word in si_sb_ches_che_case or words.last.lower() in si_sb_ches_che:
3422
+ return word[:-1]
3423
+
3424
+ if words.lowered[-4:] in ("ches", "shes"):
3425
+ return word[:-2]
3426
+
3427
+ if words.last.lower() in si_sb_xes_xe:
3428
+ return word[:-1]
3429
+
3430
+ if words.lowered[-3:] == "xes":
3431
+ return word[:-2]
3432
+
3433
+ # HANDLE ...ves -> ...f (REVERSE OF ...f -> ...ves)
3434
+
3435
+ if word in si_sb_ves_ve_case or words.last.lower() in si_sb_ves_ve:
3436
+ return word[:-1]
3437
+
3438
+ if words.lowered[-3:] == "ves":
3439
+ if words.lowered[-5:-3] in ("el", "al", "ol"):
3440
+ return word[:-3] + "f"
3441
+ if words.lowered[-5:-3] == "ea" and word[-6:-5] != "d":
3442
+ return word[:-3] + "f"
3443
+ if words.lowered[-5:-3] in ("ni", "li", "wi"):
3444
+ return word[:-3] + "fe"
3445
+ if words.lowered[-5:-3] == "ar":
3446
+ return word[:-3] + "f"
3447
+
3448
+ # HANDLE ...y
3449
+
3450
+ if words.lowered[-2:] == "ys":
3451
+ if len(words.lowered) > 2 and words.lowered[-3] in "aeiou":
3452
+ return word[:-1]
3453
+
3454
+ if self.classical_dict["names"]:
3455
+ if words.lowered[-2:] == "ys" and word[0] == word[0].upper():
3456
+ return word[:-1]
3457
+
3458
+ if words.lowered[-3:] == "ies":
3459
+ return word[:-3] + "y"
3460
+
3461
+ # HANDLE ...o
3462
+
3463
+ if words.lowered[-2:] == "os":
3464
+ if words.last.lower() in si_sb_U_o_os_complete:
3465
+ return word[:-1]
3466
+
3467
+ for k, v in si_sb_U_o_os_bysize.items():
3468
+ if words.lowered[-k:] in v:
3469
+ return word[:-1]
3470
+
3471
+ if words.lowered[-3:] in ("aos", "eos", "ios", "oos", "uos"):
3472
+ return word[:-1]
3473
+
3474
+ if words.lowered[-3:] == "oes":
3475
+ return word[:-2]
3476
+
3477
+ # UNASSIMILATED IMPORTS FINAL RULE
3478
+
3479
+ if word in si_sb_es_is:
3480
+ return word[:-2] + "is"
3481
+
3482
+ # OTHERWISE JUST REMOVE ...s
3483
+
3484
+ if words.lowered[-1] == "s":
3485
+ return word[:-1]
3486
+
3487
+ # COULD NOT FIND SINGULAR
3488
+
3489
+ return False
3490
+
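_sinoun is the engine behind the public singular_noun() wrapper, which returns False when the word does not appear to be a plural it knows how to convert. A minimal sketch, assuming the standard inflect engine API:

    import inflect

    p = inflect.engine()
    p.singular_noun("geese")  # 'goose'
    p.singular_noun("mice")   # 'mouse'
    p.singular_noun("dogs")   # 'dog'
    p.singular_noun("dog")    # False -- already singular
    p.gender("feminine")
    p.singular_noun("they")   # 'she' -- the gender set above selects the pronoun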
3491
+ # ADJECTIVES
3492
+
3493
+ @typechecked
3494
+ def a(self, text: Word, count: Optional[Union[int, str, Any]] = 1) -> str:
3495
+ """
3496
+ Return the appropriate indefinite article followed by text.
3497
+
3498
+ The indefinite article is either 'a' or 'an'.
3499
+
3500
+ If count is not one, then return count followed by text
3501
+ instead of 'a' or 'an'.
3502
+
3503
+ Whitespace at the start and end is preserved.
3504
+
3505
+ """
3506
+ mo = INDEFINITE_ARTICLE_TEST.search(text)
3507
+ if mo:
3508
+ word = mo.group(2)
3509
+ if not word:
3510
+ return text
3511
+ pre = mo.group(1)
3512
+ post = mo.group(3)
3513
+ result = self._indef_article(word, count)
3514
+ return f"{pre}{result}{post}"
3515
+ return ""
3516
+
3517
+ an = a
3518
+
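a() and its alias an() choose the indefinite article from the case table defined just below. A brief usage sketch, assuming the standard inflect engine API:

    import inflect

    p = inflect.engine()
    p.a("apple")           # 'an apple'
    p.an("hour")           # 'an hour'        (explicit 'an' special case)
    p.a("university")      # 'a university'   (vowel letter, consonant sound)
    p.a("error", count=3)  # '3 error' -- a count other than 1 just prefixes the count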
3519
+ _indef_article_cases = (
3520
+ # HANDLE ORDINAL FORMS
3521
+ (A_ordinal_a, "a"),
3522
+ (A_ordinal_an, "an"),
3523
+ # HANDLE SPECIAL CASES
3524
+ (A_explicit_an, "an"),
3525
+ (SPECIAL_AN, "an"),
3526
+ (SPECIAL_A, "a"),
3527
+ # HANDLE ABBREVIATIONS
3528
+ (A_abbrev, "an"),
3529
+ (SPECIAL_ABBREV_AN, "an"),
3530
+ (SPECIAL_ABBREV_A, "a"),
3531
+ # HANDLE CONSONANTS
3532
+ (CONSONANTS, "a"),
3533
+ # HANDLE SPECIAL VOWEL-FORMS
3534
+ (ARTICLE_SPECIAL_EU, "a"),
3535
+ (ARTICLE_SPECIAL_ONCE, "a"),
3536
+ (ARTICLE_SPECIAL_ONETIME, "a"),
3537
+ (ARTICLE_SPECIAL_UNIT, "a"),
3538
+ (ARTICLE_SPECIAL_UBA, "a"),
3539
+ (ARTICLE_SPECIAL_UKR, "a"),
3540
+ (A_explicit_a, "a"),
3541
+ # HANDLE SPECIAL CAPITALS
3542
+ (SPECIAL_CAPITALS, "a"),
3543
+ # HANDLE VOWELS
3544
+ (VOWELS, "an"),
3545
+ # HANDLE y...
3546
+ # (BEFORE CERTAIN CONSONANTS IMPLIES (UNNATURALIZED) "i.." SOUND)
3547
+ (A_y_cons, "an"),
3548
+ )
3549
+
3550
+ def _indef_article(self, word: str, count: Union[int, str, Any]) -> str:
3551
+ mycount = self.get_count(count)
3552
+
3553
+ if mycount != 1:
3554
+ return f"{count} {word}"
3555
+
3556
+ # HANDLE USER-DEFINED VARIANTS
3557
+
3558
+ value = self.ud_match(word, self.A_a_user_defined)
3559
+ if value is not None:
3560
+ return f"{value} {word}"
3561
+
3562
+ matches = (
3563
+ f'{article} {word}'
3564
+ for regexen, article in self._indef_article_cases
3565
+ if regexen.search(word)
3566
+ )
3567
+
3568
+ # OTHERWISE, GUESS "a"
3569
+ fallback = f'a {word}'
3570
+ return next(matches, fallback)
3571
+
3572
+ # 2. TRANSLATE ZERO-QUANTIFIED $word TO "no plural($word)"
3573
+
3574
+ @typechecked
3575
+ def no(self, text: Word, count: Optional[Union[int, str]] = None) -> str:
3576
+ """
3577
+ If count is 0, no, zero or nil, return 'no' followed by the plural
3578
+ of text.
3579
+
3580
+ If count is one of:
3581
+ 1, a, an, one, each, every, this, that
3582
+ return count followed by text.
3583
+
3584
+ Otherwise return count followed by the plural of text.
3585
+
3586
+ In the return value count is always followed by a space.
3587
+
3588
+ Whitespace at the start and end is preserved.
3589
+
3590
+ """
3591
+ if count is None and self.persistent_count is not None:
3592
+ count = self.persistent_count
3593
+
3594
+ if count is None:
3595
+ count = 0
3596
+ mo = PARTITION_WORD.search(text)
3597
+ if mo:
3598
+ pre = mo.group(1)
3599
+ word = mo.group(2)
3600
+ post = mo.group(3)
3601
+ else:
3602
+ pre = ""
3603
+ word = ""
3604
+ post = ""
3605
+
3606
+ if str(count).lower() in pl_count_zero:
3607
+ count = 'no'
3608
+ return f"{pre}{count} {self.plural(word, count)}{post}"
3609
+
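A short usage sketch for no(), assuming the standard inflect engine API:

    import inflect

    p = inflect.engine()
    p.no("error", 0)  # 'no errors'
    p.no("error", 1)  # '1 error'
    p.no("error", 3)  # '3 errors'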
3610
+ # PARTICIPLES
3611
+
3612
+ @typechecked
3613
+ def present_participle(self, word: Word) -> str:
3614
+ """
3615
+ Return the present participle for word.
3616
+
3617
+ word is the 3rd person singular verb.
3618
+
3619
+ """
3620
+ plv = self.plural_verb(word, 2)
3621
+ ans = plv
3622
+
3623
+ for regexen, repl in PRESENT_PARTICIPLE_REPLACEMENTS:
3624
+ ans, num = regexen.subn(repl, plv)
3625
+ if num:
3626
+ return f"{ans}ing"
3627
+ return f"{ans}ing"
3628
+
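present_participle() pluralises the verb first (to strip the 3rd-person -s) and then applies the -ing spelling rules. A minimal sketch, assuming the standard inflect engine API:

    import inflect

    p = inflect.engine()
    p.present_participle("runs")  # 'running'
    p.present_participle("sees")  # 'seeing'
    p.present_participle("dies")  # 'dying'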
3629
+ # NUMERICAL INFLECTIONS
3630
+
3631
+ @typechecked
3632
+ def ordinal(self, num: Union[Number, Word]) -> str:
3633
+ """
3634
+ Return the ordinal of num.
3635
+
3636
+ >>> ordinal = engine().ordinal
3637
+ >>> ordinal(1)
3638
+ '1st'
3639
+ >>> ordinal('one')
3640
+ 'first'
3641
+ """
3642
+ if DIGIT.match(str(num)):
3643
+ if isinstance(num, (float, int)) and int(num) == num:
3644
+ n = int(num)
3645
+ else:
3646
+ if "." in str(num):
3647
+ try:
3648
+ # numbers after decimal,
3649
+ # so only need last one for ordinal
3650
+ n = int(str(num)[-1])
3651
+
3652
+ except ValueError: # ends with '.', so need to use whole string
3653
+ n = int(str(num)[:-1])
3654
+ else:
3655
+ n = int(num) # type: ignore
3656
+ try:
3657
+ post = nth[n % 100]
3658
+ except KeyError:
3659
+ post = nth[n % 10]
3660
+ return f"{num}{post}"
3661
+ else:
3662
+ return self._sub_ord(num)
3663
+
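Beyond the doctest above, ordinal() also handles teens, larger numerals and number words. A short sketch, assuming the standard inflect engine API:

    import inflect

    p = inflect.engine()
    p.ordinal(11)        # '11th'
    p.ordinal(102)       # '102nd'
    p.ordinal("four")    # 'fourth'
    p.ordinal("twenty")  # 'twentieth'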
3664
+ def millfn(self, ind: int = 0) -> str:
3665
+ if ind > len(mill) - 1:
3666
+ raise NumOutOfRangeError
3667
+ return mill[ind]
3668
+
3669
+ def unitfn(self, units: int, mindex: int = 0) -> str:
3670
+ return f"{unit[units]}{self.millfn(mindex)}"
3671
+
3672
+ def tenfn(self, tens, units, mindex=0) -> str:
3673
+ if tens != 1:
3674
+ tens_part = ten[tens]
3675
+ if tens and units:
3676
+ hyphen = "-"
3677
+ else:
3678
+ hyphen = ""
3679
+ unit_part = unit[units]
3680
+ mill_part = self.millfn(mindex)
3681
+ return f"{tens_part}{hyphen}{unit_part}{mill_part}"
3682
+ return f"{teen[units]}{mill[mindex]}"
3683
+
3684
+ def hundfn(self, hundreds: int, tens: int, units: int, mindex: int) -> str:
3685
+ if hundreds:
3686
+ andword = f" {self._number_args['andword']} " if tens or units else ""
3687
+ # use unit not unitfn as simpler
3688
+ return (
3689
+ f"{unit[hundreds]} hundred{andword}"
3690
+ f"{self.tenfn(tens, units)}{self.millfn(mindex)}, "
3691
+ )
3692
+ if tens or units:
3693
+ return f"{self.tenfn(tens, units)}{self.millfn(mindex)}, "
3694
+ return ""
3695
+
3696
+ def group1sub(self, mo: Match) -> str:
3697
+ units = int(mo.group(1))
3698
+ if units == 1:
3699
+ return f" {self._number_args['one']}, "
3700
+ elif units:
3701
+ return f"{unit[units]}, "
3702
+ else:
3703
+ return f" {self._number_args['zero']}, "
3704
+
3705
+ def group1bsub(self, mo: Match) -> str:
3706
+ units = int(mo.group(1))
3707
+ if units:
3708
+ return f"{unit[units]}, "
3709
+ else:
3710
+ return f" {self._number_args['zero']}, "
3711
+
3712
+ def group2sub(self, mo: Match) -> str:
3713
+ tens = int(mo.group(1))
3714
+ units = int(mo.group(2))
3715
+ if tens:
3716
+ return f"{self.tenfn(tens, units)}, "
3717
+ if units:
3718
+ return f" {self._number_args['zero']} {unit[units]}, "
3719
+ return f" {self._number_args['zero']} {self._number_args['zero']}, "
3720
+
3721
+ def group3sub(self, mo: Match) -> str:
3722
+ hundreds = int(mo.group(1))
3723
+ tens = int(mo.group(2))
3724
+ units = int(mo.group(3))
3725
+ if hundreds == 1:
3726
+ hunword = f" {self._number_args['one']}"
3727
+ elif hundreds:
3728
+ hunword = str(unit[hundreds])
3729
+ else:
3730
+ hunword = f" {self._number_args['zero']}"
3731
+ if tens:
3732
+ tenword = self.tenfn(tens, units)
3733
+ elif units:
3734
+ tenword = f" {self._number_args['zero']} {unit[units]}"
3735
+ else:
3736
+ tenword = f" {self._number_args['zero']} {self._number_args['zero']}"
3737
+ return f"{hunword} {tenword}, "
3738
+
3739
+ def hundsub(self, mo: Match) -> str:
3740
+ ret = self.hundfn(
3741
+ int(mo.group(1)), int(mo.group(2)), int(mo.group(3)), self.mill_count
3742
+ )
3743
+ self.mill_count += 1
3744
+ return ret
3745
+
3746
+ def tensub(self, mo: Match) -> str:
3747
+ return f"{self.tenfn(int(mo.group(1)), int(mo.group(2)), self.mill_count)}, "
3748
+
3749
+ def unitsub(self, mo: Match) -> str:
3750
+ return f"{self.unitfn(int(mo.group(1)), self.mill_count)}, "
3751
+
3752
+ def enword(self, num: str, group: int) -> str:
3753
+ # import pdb
3754
+ # pdb.set_trace()
3755
+
3756
+ if group == 1:
3757
+ num = DIGIT_GROUP.sub(self.group1sub, num)
3758
+ elif group == 2:
3759
+ num = TWO_DIGITS.sub(self.group2sub, num)
3760
+ num = DIGIT_GROUP.sub(self.group1bsub, num, 1)
3761
+ elif group == 3:
3762
+ num = THREE_DIGITS.sub(self.group3sub, num)
3763
+ num = TWO_DIGITS.sub(self.group2sub, num, 1)
3764
+ num = DIGIT_GROUP.sub(self.group1sub, num, 1)
3765
+ elif int(num) == 0:
3766
+ num = self._number_args["zero"]
3767
+ elif int(num) == 1:
3768
+ num = self._number_args["one"]
3769
+ else:
3770
+ num = num.lstrip().lstrip("0")
3771
+ self.mill_count = 0
3772
+ # surely there's a better way to do the next bit
3773
+ mo = THREE_DIGITS_WORD.search(num)
3774
+ while mo:
3775
+ num = THREE_DIGITS_WORD.sub(self.hundsub, num, 1)
3776
+ mo = THREE_DIGITS_WORD.search(num)
3777
+ num = TWO_DIGITS_WORD.sub(self.tensub, num, 1)
3778
+ num = ONE_DIGIT_WORD.sub(self.unitsub, num, 1)
3779
+ return num
3780
+
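enword() implements the group=1/2/3 digit grouping used by number_to_words(). A minimal sketch of its effect, assuming the standard inflect engine API:

    import inflect

    p = inflect.engine()
    p.number_to_words(1234, group=1)  # 'one, two, three, four'
    p.number_to_words(1234, group=2)  # 'twelve, thirty-four'
    p.number_to_words(1234, group=3)  # 'one twenty-three, four'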
3781
+ @staticmethod
3782
+ def _sub_ord(val):
3783
+ new = ordinal_suff.sub(lambda match: ordinal[match.group(1)], val)
3784
+ return new + "th" * (new == val)
3785
+
3786
+ @classmethod
3787
+ def _chunk_num(cls, num, decimal, group):
3788
+ if decimal:
3789
+ max_split = -1 if group != 0 else 1
3790
+ chunks = num.split(".", max_split)
3791
+ else:
3792
+ chunks = [num]
3793
+ return cls._remove_last_blank(chunks)
3794
+
3795
+ @staticmethod
3796
+ def _remove_last_blank(chunks):
3797
+ """
3798
+ Remove the last item from chunks if it's a blank string.
3799
+
3800
+ Return the resultant chunks and whether the last item was removed.
3801
+ """
3802
+ removed = chunks[-1] == ""
3803
+ result = chunks[:-1] if removed else chunks
3804
+ return result, removed
3805
+
3806
+ @staticmethod
3807
+ def _get_sign(num):
3808
+ return {'+': 'plus', '-': 'minus'}.get(num.lstrip()[0], '')
3809
+
3810
+ @typechecked
3811
+ def number_to_words( # noqa: C901
3812
+ self,
3813
+ num: Union[Number, Word],
3814
+ wantlist: bool = False,
3815
+ group: int = 0,
3816
+ comma: Union[Falsish, str] = ",",
3817
+ andword: str = "and",
3818
+ zero: str = "zero",
3819
+ one: str = "one",
3820
+ decimal: Union[Falsish, str] = "point",
3821
+ threshold: Optional[int] = None,
3822
+ ) -> Union[str, List[str]]:
3823
+ """
3824
+ Return a number in words.
3825
+
3826
+ group = 1, 2 or 3 to group numbers before turning into words
3827
+ comma: define comma
3828
+
3829
+ andword:
3830
+ word for 'and'. Can be set to ''.
3831
+ e.g. "one hundred and one" vs "one hundred one"
3832
+
3833
+ zero: word for '0'
3834
+ one: word for '1'
3835
+ decimal: word for decimal point
3836
+ threshold: numbers above threshold not turned into words
3837
+
3838
+ Parameters are not remembered from the last call (a departure from the Perl version).
3839
+ """
3840
+ self._number_args = {"andword": andword, "zero": zero, "one": one}
3841
+ num = str(num)
3842
+
3843
+ # Handle "stylistic" conversions (up to a given threshold)...
3844
+ if threshold is not None and float(num) > threshold:
3845
+ spnum = num.split(".", 1)
3846
+ while comma:
3847
+ (spnum[0], n) = FOUR_DIGIT_COMMA.subn(r"\1,\2", spnum[0])
3848
+ if n == 0:
3849
+ break
3850
+ try:
3851
+ return f"{spnum[0]}.{spnum[1]}"
3852
+ except IndexError:
3853
+ return str(spnum[0])
3854
+
3855
+ if group < 0 or group > 3:
3856
+ raise BadChunkingOptionError
3857
+
3858
+ sign = self._get_sign(num)
3859
+
3860
+ if num in nth_suff:
3861
+ num = zero
3862
+
3863
+ myord = num[-2:] in nth_suff
3864
+ if myord:
3865
+ num = num[:-2]
3866
+
3867
+ chunks, finalpoint = self._chunk_num(num, decimal, group)
3868
+
3869
+ loopstart = chunks[0] == ""
3870
+ first: bool | None = not loopstart
3871
+
3872
+ def _handle_chunk(chunk):
3873
+ nonlocal first
3874
+
3875
+ # remove all non numeric \D
3876
+ chunk = NON_DIGIT.sub("", chunk)
3877
+ if chunk == "":
3878
+ chunk = "0"
3879
+
3880
+ if group == 0 and not first:
3881
+ chunk = self.enword(chunk, 1)
3882
+ else:
3883
+ chunk = self.enword(chunk, group)
3884
+
3885
+ if chunk[-2:] == ", ":
3886
+ chunk = chunk[:-2]
3887
+ chunk = WHITESPACES_COMMA.sub(",", chunk)
3888
+
3889
+ if group == 0 and first:
3890
+ chunk = COMMA_WORD.sub(f" {andword} \\1", chunk)
3891
+ chunk = WHITESPACES.sub(" ", chunk)
3892
+ # chunk = re.sub(r"(\A\s|\s\Z)", self.blankfn, chunk)
3893
+ chunk = chunk.strip()
3894
+ if first:
3895
+ first = None
3896
+ return chunk
3897
+
3898
+ chunks[loopstart:] = map(_handle_chunk, chunks[loopstart:])
3899
+
3900
+ numchunks = []
3901
+ if first != 0:
3902
+ numchunks = chunks[0].split(f"{comma} ")
3903
+
3904
+ if myord and numchunks:
3905
+ numchunks[-1] = self._sub_ord(numchunks[-1])
3906
+
3907
+ for chunk in chunks[1:]:
3908
+ numchunks.append(decimal)
3909
+ numchunks.extend(chunk.split(f"{comma} "))
3910
+
3911
+ if finalpoint:
3912
+ numchunks.append(decimal)
3913
+
3914
+ if wantlist:
3915
+ return [sign] * bool(sign) + numchunks
3916
+
3917
+ signout = f"{sign} " if sign else ""
3918
+ valout = (
3919
+ ', '.join(numchunks)
3920
+ if group
3921
+ else ''.join(self._render(numchunks, decimal, comma))
3922
+ )
3923
+ return signout + valout
3924
+
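A short usage sketch for number_to_words(), assuming the standard inflect engine API:

    import inflect

    p = inflect.engine()
    p.number_to_words(86)                     # 'eighty-six'
    p.number_to_words(1234)                   # 'one thousand, two hundred and thirty-four'
    p.number_to_words(1234, andword="")       # 'one thousand, two hundred thirty-four'
    p.number_to_words(1234, wantlist=True)    # ['one thousand', 'two hundred and thirty-four']
    p.number_to_words(1234567, threshold=10)  # '1,234,567' -- above threshold, digits kept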
3925
+ @staticmethod
3926
+ def _render(chunks, decimal, comma):
3927
+ first_item = chunks.pop(0)
3928
+ yield first_item
3929
+ first = decimal is None or not first_item.endswith(decimal)
3930
+ for nc in chunks:
3931
+ if nc == decimal:
3932
+ first = False
3933
+ elif first:
3934
+ yield comma
3935
+ yield f" {nc}"
3936
+
3937
+ @typechecked
3938
+ def join(
3939
+ self,
3940
+ words: Optional[Sequence[Word]],
3941
+ sep: Optional[str] = None,
3942
+ sep_spaced: bool = True,
3943
+ final_sep: Optional[str] = None,
3944
+ conj: str = "and",
3945
+ conj_spaced: bool = True,
3946
+ ) -> str:
3947
+ """
3948
+ Join words into a list.
3949
+
3950
+ e.g. join(['ant', 'bee', 'fly']) returns 'ant, bee, and fly'
3951
+
3952
+ options:
3953
+ conj: replacement for 'and'
3954
+ sep: separator. default ',', unless ',' is in the list then ';'
3955
+ final_sep: final separator. default ',', unless ',' is in the list then ';'
3956
+ conj_spaced: boolean. Should conj have spaces around it
3957
+
3958
+ """
3959
+ if not words:
3960
+ return ""
3961
+ if len(words) == 1:
3962
+ return words[0]
3963
+
3964
+ if conj_spaced:
3965
+ if conj == "":
3966
+ conj = " "
3967
+ else:
3968
+ conj = f" {conj} "
3969
+
3970
+ if len(words) == 2:
3971
+ return f"{words[0]}{conj}{words[1]}"
3972
+
3973
+ if sep is None:
3974
+ if "," in "".join(words):
3975
+ sep = ";"
3976
+ else:
3977
+ sep = ","
3978
+ if final_sep is None:
3979
+ final_sep = sep
3980
+
3981
+ final_sep = f"{final_sep}{conj}"
3982
+
3983
+ if sep_spaced:
3984
+ sep += " "
3985
+
3986
+ return f"{sep.join(words[0:-1])}{final_sep}{words[-1]}"