mergeron 2025.739319.3.tar.gz → 2025.739341.8.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.



Files changed (26)
  1. mergeron-2025.739341.8/PKG-INFO +94 -0
  2. mergeron-2025.739341.8/README.rst +52 -0
  3. {mergeron-2025.739319.3 → mergeron-2025.739341.8}/pyproject.toml +43 -21
  4. {mergeron-2025.739319.3 → mergeron-2025.739341.8}/src/mergeron/__init__.py +21 -23
  5. {mergeron-2025.739319.3 → mergeron-2025.739341.8}/src/mergeron/core/__init__.py +21 -5
  6. mergeron-2025.739341.8/src/mergeron/core/empirical_margin_distribution.py +325 -0
  7. {mergeron-2025.739319.3 → mergeron-2025.739341.8}/src/mergeron/core/ftc_merger_investigations_data.py +31 -35
  8. {mergeron-2025.739319.3 → mergeron-2025.739341.8}/src/mergeron/core/guidelines_boundaries.py +27 -20
  9. {mergeron-2025.739319.3 → mergeron-2025.739341.8}/src/mergeron/core/guidelines_boundary_functions.py +22 -32
  10. {mergeron-2025.739319.3 → mergeron-2025.739341.8}/src/mergeron/core/guidelines_boundary_functions_extra.py +15 -30
  11. {mergeron-2025.739319.3 → mergeron-2025.739341.8}/src/mergeron/core/pseudorandom_numbers.py +21 -18
  12. {mergeron-2025.739319.3 → mergeron-2025.739341.8}/src/mergeron/data/__init__.py +13 -11
  13. mergeron-2025.739341.8/src/mergeron/data/damodaran_margin_data_serialized.zip +0 -0
  14. {mergeron-2025.739319.3 → mergeron-2025.739341.8}/src/mergeron/gen/__init__.py +32 -41
  15. {mergeron-2025.739319.3 → mergeron-2025.739341.8}/src/mergeron/gen/data_generation.py +19 -23
  16. {mergeron-2025.739319.3 → mergeron-2025.739341.8}/src/mergeron/gen/data_generation_functions.py +27 -38
  17. {mergeron-2025.739319.3 → mergeron-2025.739341.8}/src/mergeron/gen/enforcement_stats.py +144 -23
  18. {mergeron-2025.739319.3 → mergeron-2025.739341.8}/src/mergeron/gen/upp_tests.py +4 -9
  19. mergeron-2025.739319.3/PKG-INFO +0 -174
  20. mergeron-2025.739319.3/README.rst +0 -132
  21. mergeron-2025.739319.3/src/mergeron/core/empirical_margin_distribution.py +0 -270
  22. mergeron-2025.739319.3/src/mergeron/data/damodaran_margin_data.xls +0 -0
  23. mergeron-2025.739319.3/src/mergeron/demo/__init__.py +0 -3
  24. mergeron-2025.739319.3/src/mergeron/demo/visualize_empirical_margin_distribution.py +0 -94
  25. {mergeron-2025.739319.3 → mergeron-2025.739341.8}/src/mergeron/data/ftc_merger_investigations_data.zip +0 -0
  26. {mergeron-2025.739319.3 → mergeron-2025.739341.8}/src/mergeron/py.typed +0 -0
mergeron-2025.739341.8/PKG-INFO
@@ -0,0 +1,94 @@
+ Metadata-Version: 2.3
+ Name: mergeron
+ Version: 2025.739341.8
+ Summary: Python for analyzing merger enforcement policy
+ License: MIT
+ Keywords: merger policy analysis,merger guidelines,merger screening,policy presumptions,concentration standards,upward pricing pressure,GUPPI
+ Author: Murthy Kambhampaty
+ Author-email: smk@capeconomics.com
+ Requires-Python: >=3.12
+ Classifier: Development Status :: 4 - Beta
+ Classifier: Environment :: Console
+ Classifier: Intended Audience :: End Users/Desktop
+ Classifier: Intended Audience :: Science/Research
+ Classifier: License :: OSI Approved :: MIT License
+ Classifier: Operating System :: OS Independent
+ Classifier: Programming Language :: Python
+ Classifier: Programming Language :: Python :: 3
+ Classifier: Programming Language :: Python :: 3 :: Only
+ Classifier: Programming Language :: Python :: 3.12
+ Classifier: Programming Language :: Python :: Implementation :: CPython
+ Requires-Dist: aenum (>=3.1.15,<4.0.0)
+ Requires-Dist: attrs (>=23.2)
+ Requires-Dist: bs4 (>=0.0.1)
+ Requires-Dist: certifi (>=2023.11.17)
+ Requires-Dist: h5py (>=3.13.0,<4.0.0)
+ Requires-Dist: jinja2 (>=3.1)
+ Requires-Dist: joblib (>=1.3)
+ Requires-Dist: linuxdoc (>=20240924,<20240925)
+ Requires-Dist: lxml (>=5.3.1,<6.0.0)
+ Requires-Dist: matplotlib (>=3.8)
+ Requires-Dist: mpmath (>=1.3)
+ Requires-Dist: python-calamine (>=0.3.1,<0.4.0)
+ Requires-Dist: ruamel-yaml (>=0.18.10,<0.19.0)
+ Requires-Dist: scipy (>=1.12)
+ Requires-Dist: sympy (>=1.12)
+ Requires-Dist: types-beautifulsoup4 (>=4.11.2)
+ Requires-Dist: urllib3 (>=2.2.2,<3.0.0)
+ Project-URL: Documentation, https://capeconomics.github.io/mergeron/
+ Project-URL: Repository, https://github.com/capeconomics/mergeron.git
+ Description-Content-Type: text/x-rst
+
+ mergeron: Python for analyzing merger enforcement policy
+ ========================================================
+
+
+ .. image:: https://img.shields.io/pypi/v/mergeron
+ :alt: PyPI - Package Version
+ :target: https://pypi.python.org/pypi/mergeron/
+ .. image:: https://img.shields.io/pypi/pyversions/mergeron
+ :alt: PyPI - Python Version
+ :target: https://pypi.python.org/pypi/mergeron/
+ .. image:: https://img.shields.io/pypi/status/mergeron
+ :alt: PyPI - Package status
+ :target: https://pypi.python.org/pypi/mergeron/
+
+ |
+
+ .. image:: https://img.shields.io/endpoint?url=https://python-poetry.org/badge/v0.json
+ :target: https://python-poetry.org/
+ .. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json
+ :target: https://github.com/astral-sh/ruff
+ .. image:: https://www.mypy-lang.org/static/mypy_badge.svg
+ :target: https://mypy-lang.org/
+ .. image:: https://img.shields.io/badge/code%20style-black-000000
+ :target: https://github.com/psf/black
+ .. image:: https://img.shields.io/badge/License-MIT-yellow
+ :alt: License: MIT
+ :target: https://opensource.org/licenses/MIT
+
+
+ *Visualize* the sets of mergers falling within specified concentration and diversion-ratio thresholds. *Analyze* merger investigations data published by the U.S. Federal Trade Commission in various reports on extended merger investigations (Second Requests) during 1996 to 2011.
+ *Generate* data under specified distributions of firm counts, market shares, price-cost margins, and prices, optionally imposing equilibrium conditions for Bertrand oligopoly with MNL demand and or restrictions implied by statutory filing thresholds. *Compute* intrinsic enforcement rates or
+ intrinsic clearance rates using the generated data, given thresholds for
+ concentration;
+ diversion ratio;
+ gross upward pricing pressure (GUPPI);
+ critical marginal cost reduction (CMCR); and
+ illustrative price rise (IPR).
+
+ Installation
+ ------------
+
+ To install the package, use the following shell command:
+
+ .. code:: bash
+
+ pip install mergeron
+
+
+ Documentation
+ -------------
+
+ Usage guide and API reference available `here <https://capeconomics.github.io/mergeron/>`_.
+
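The metadata fields above are what installers and resolvers read from the built distribution. As a quick standalone check, not part of mergeron itself, the same fields can be read back with ``importlib.metadata`` once a release is installed; the printed values below assume 2025.739341.8 is the installed version:

.. code:: python

    # Standalone sketch: read back the metadata recorded in PKG-INFO.
    # Assumes mergeron 2025.739341.8 is installed in the active environment.
    from importlib.metadata import metadata, version

    print(version("mergeron"))         # "2025.739341.8"
    meta = metadata("mergeron")
    print(meta["Requires-Python"])     # ">=3.12"
    for requirement in meta.get_all("Requires-Dist"):
        print(requirement)             # one line per runtime dependency listed above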
mergeron-2025.739341.8/README.rst
@@ -0,0 +1,52 @@
+ mergeron: Python for analyzing merger enforcement policy
+ ========================================================
+
+
+ .. image:: https://img.shields.io/pypi/v/mergeron
+ :alt: PyPI - Package Version
+ :target: https://pypi.python.org/pypi/mergeron/
+ .. image:: https://img.shields.io/pypi/pyversions/mergeron
+ :alt: PyPI - Python Version
+ :target: https://pypi.python.org/pypi/mergeron/
+ .. image:: https://img.shields.io/pypi/status/mergeron
+ :alt: PyPI - Package status
+ :target: https://pypi.python.org/pypi/mergeron/
+
+ |
+
+ .. image:: https://img.shields.io/endpoint?url=https://python-poetry.org/badge/v0.json
+ :target: https://python-poetry.org/
+ .. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json
+ :target: https://github.com/astral-sh/ruff
+ .. image:: https://www.mypy-lang.org/static/mypy_badge.svg
+ :target: https://mypy-lang.org/
+ .. image:: https://img.shields.io/badge/code%20style-black-000000
+ :target: https://github.com/psf/black
+ .. image:: https://img.shields.io/badge/License-MIT-yellow
+ :alt: License: MIT
+ :target: https://opensource.org/licenses/MIT
+
+
+ *Visualize* the sets of mergers falling within specified concentration and diversion-ratio thresholds. *Analyze* merger investigations data published by the U.S. Federal Trade Commission in various reports on extended merger investigations (Second Requests) during 1996 to 2011.
+ *Generate* data under specified distributions of firm counts, market shares, price-cost margins, and prices, optionally imposing equilibrium conditions for Bertrand oligopoly with MNL demand and or restrictions implied by statutory filing thresholds. *Compute* intrinsic enforcement rates or
+ intrinsic clearance rates using the generated data, given thresholds for
+ concentration;
+ diversion ratio;
+ gross upward pricing pressure (GUPPI);
+ critical marginal cost reduction (CMCR); and
+ illustrative price rise (IPR).
+
+ Installation
+ ------------
+
+ To install the package, use the following shell command:
+
+ .. code:: bash
+
+ pip install mergeron
+
+
+ Documentation
+ -------------
+
+ Usage guide and API reference available `here <https://capeconomics.github.io/mergeron/>`_.
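The screening thresholds the README lists (GUPPI, CMCR, IPR) are scalar diagnostics computed from diversion ratios, margins, and prices. As a minimal standalone sketch of the arithmetic behind the first of these, and not a call into mergeron's own API, the gross upward pricing pressure index for one merging product is the diversion-weighted, price-adjusted margin of the other:

.. code:: python

    # Standalone sketch of the standard GUPPI formula; the function name and
    # inputs are illustrative and not part of mergeron's API.
    def guppi_1(diversion_ratio_12: float, margin_2: float, price_2: float, price_1: float) -> float:
        """GUPPI for product 1: d12 * m2 * (p2 / p1)."""
        return diversion_ratio_12 * margin_2 * (price_2 / price_1)

    print(guppi_1(0.25, 0.40, 10.0, 10.0))  # 0.1, i.e. above a 5% screening threshold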
{mergeron-2025.739319.3 → mergeron-2025.739341.8}/pyproject.toml
@@ -1,7 +1,9 @@
- [tool.poetry]
+ [project]
  name = "mergeron"
- authors = ["Murthy Kambhampaty <smk@capeconomics.com>"]
- description = "Analyze merger enforcement policy using Python"
+ authors = [
+ { name = "Murthy Kambhampaty", email = "smk@capeconomics.com" },
+ ]
+ description = "Python for analyzing merger enforcement policy"
  readme = "README.rst"
  license = "MIT"
  keywords = [
@@ -13,7 +15,7 @@ keywords = [
  "upward pricing pressure",
  "GUPPI",
  ]
- version = "2025.739319.3"
+ version = "2025.739341.8"

  # Classifiers list: https://pypi.org/classifiers/
  classifiers = [
@@ -30,6 +32,14 @@ classifiers = [
  "Programming Language :: Python :: Implementation :: CPython",
  ]

+ requires-python = ">=3.12"
+
+ dynamic = ["dependencies"]
+
+ [project.urls]
+ Documentation = "https://capeconomics.github.io/mergeron/"
+ Repository = "https://github.com/capeconomics/mergeron.git"
+
  [build-system]
  requires = ["poetry-core"]
  build-backend = "poetry.core.masonry.api"
@@ -45,37 +55,38 @@ jinja2 = ">=3.1"
  joblib = ">=1.3"
  matplotlib = ">=3.8"
  mpmath = ">=1.3"
- msgpack = ">=1.0"
- msgpack-numpy = ">=0.4"
  python = "^3.12"
  scipy = ">=1.12"
  sympy = ">=1.12"
- xlsxwriter = ">=3.1"
  certifi = ">=2023.11.17"
  types-beautifulsoup4 = ">=4.11.2"
- xlrd = "^2.0.1" # Needed to read margin data
  urllib3 = "^2.2.2"
  ruamel-yaml = "^0.18.10"
  h5py = "^3.13.0"
+ linuxdoc = "^20240924"
+ lxml = "^5.3.1"
+ python-calamine = "^0.3.1"


  [tool.poetry.group.dev.dependencies]
- icecream = ">=2.1.0"
  jinja2 = ">=3.1.5"
  mypy = ">=1.8"
- openpyxl = ">=3.1.2"
- pendulum = ">=3.0.0"
  ruff = ">=0.5"
  poetry-plugin-export = "^1.8.0"
  pytest = ">=8.0"
- sphinx = ">8.2"
+ sphinx = ">8.1"
  semver = ">=3.0"
  sphinx-autodoc-typehints = ">=2.0.0"
  sphinx-autoapi = ">=3.6.0"
  sphinx-immaterial = ">0.11"
  pipdeptree = ">=2.15.1"
- types-openpyxl = ">=3.0.0"
  virtualenv = ">=20.28.0"
+ pytest-cov = "^6.0.0"
+ pendulum = "^3.0.0"
+ rstcheck = "^6.2.4"
+ ipython-pygments-lexers = "^1.1.1"
+
+
  [tool.ruff]

  # Exclude a variety of commonly ignored directories.
@@ -112,8 +123,11 @@ indent-width = 4


  [tool.ruff.lint]
- # From, https://github.com/sphinx-doc/sphinx/blob/master/pyproject.toml
+ # Borrowed primaryly from,
+ # https://github.com/sphinx-doc/sphinx/blob/master/pyproject.toml
+
  select = [
+ "D", # docstrings
  "E", # pycodestyle
  "F", # Pyflakes
  "I", # isort
@@ -139,13 +153,14 @@ ignore = [
  # flake-8 line length
  "E501",
  # pycodestyle
- 'E741',
+ "E741",
  # flake8-bugbear
- 'B006',
- 'B023',
+ "B006",
+ "B023",
  # flake8-bugbear opinionated (disabled by default in flake8)
- 'B904',
- 'B905',
+ "B904",
+ "B905",
+ "PLC2801", # unnecessary-dunder-call
  "PLR2004", # avoid magic values
  # RUF
  "RUF052",
@@ -160,6 +175,9 @@ ignore = [
  ]
  isort.split-on-trailing-comma = false

+ [tool.ruff.lint.pydocstyle]
+ convention = "numpy"
+
  [tool.ruff.format]
  quote-style = "double"
  indent-style = "space"
@@ -188,13 +206,17 @@ plugins = "numpy.typing.mypy_plugin"
  log_auto_indent = 4
  minversion = "8.0"
  testpaths = ["tests"]
- addopts = ["--import-mode=importlib"]
+ addopts = [
+ "--import-mode=importlib",
+ "--cov=mergeron",
+ "--cov-report=html:../tests/cov_html",
+ ]
  filterwarnings = [
  "all",
  "ignore::DeprecationWarning:dateutil.tz.tz",
  "ignore::DeprecationWarning:jinja2.lexer",
  "ignore::DeprecationWarning:joblib._utils",
- "ignore::DeprecationWarning:openpyxl.packaging.core",
+ "ignore::RuntimeWarning:mergeron.core.empirical_margin_distribution",
  "ignore::RuntimeWarning:mergeron.core.proportions_tests",
  "ignore::RuntimeWarning:mergeron.gen.enforcement_stats",
  ]
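Two of the lint changes above go together: the new ``"D"`` entry enables the pydocstyle rule group, and the added ``[tool.ruff.lint.pydocstyle]`` table sets ``convention = "numpy"``, so docstrings across the package are now checked against the numpydoc layout. A minimal illustration of that layout, using a made-up function rather than one from mergeron:

.. code:: python

    def hhi_delta(share_1: float, share_2: float) -> float:
        """Return the merger-induced change in the HHI.

        Parameters
        ----------
        share_1, share_2 : float
            Pre-merger market shares of the merging firms, in percentage points.

        Returns
        -------
        float
            The standard concentration delta, ``2 * share_1 * share_2``, in HHI points.
        """
        return 2 * share_1 * share_2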
{mergeron-2025.739319.3 → mergeron-2025.739341.8}/src/mergeron/__init__.py
@@ -1,3 +1,5 @@
+ """Variables, types, objects and functions used throughout the package."""
+
  from __future__ import annotations

  import enum
@@ -6,13 +8,14 @@ from multiprocessing import cpu_count
  from pathlib import Path
  from typing import Any, Literal

+ import attrs
  import numpy as np
  from numpy.typing import NDArray
  from ruamel import yaml

- _PKG_NAME: str = Path(__file__).parent.stem
+ _PKG_NAME: str = Path(__file__).parent.name

- VERSION = "2025.739319.3"
+ VERSION = "2025.739341.8"

  __version__ = VERSION

@@ -34,7 +37,7 @@ EMPTY_ARRAYINT = np.array([], int)

  NTHREADS = 2 * cpu_count()

- PKG_ATTRS_MAP: dict[str, object] = {}
+ PKG_ATTRS_MAP: dict[str, type] = {}

  np.set_printoptions(precision=24, floatmode="fixed")

@@ -75,23 +78,21 @@ this_yaml.indent(mapping=2, sequence=4, offset=2)
  def yaml_rt_mapper(
  _c: yaml.constructor.RoundTripConstructor, _n: yaml.MappingNode
  ) -> Mapping[str, Any]:
- """
- Constructs a mapping from a mapping node with the RoundTripConstructor
-
- """
+ """Construct mapping from a mapping node with the RoundTripConstructor."""
  data_: Mapping[str, Any] = yaml.constructor.CommentedMap()
  _c.construct_mapping(_n, maptyp=data_, deep=True)
  return data_


- def yamelize_attrs(
- _typ: object, /, *, attr_map: Mapping[str, object] = PKG_ATTRS_MAP
- ) -> None:
+ def yamelize_attrs(_typ: type, /, *, attr_map: dict[str, type] = PKG_ATTRS_MAP) -> None:
  """Add yaml representer, constructor for attrs-defined class.

- Applying this function, attributes with property, `init=False` are
- not serialized to YAML.
+ Attributes with property, `init=False` are not serialized/deserialized
+ to YAML by the functions defined here. These attributes can, of course,
+ be dumped to stand-alone (YAML) representation, and deserialized from there.
  """
+ if not attrs.has(_typ):
+ raise ValueError(f"Object {_typ} is not attrs-defined")

  attr_map |= {_typ.__name__: _typ}

@@ -99,9 +100,6 @@ def yamelize_attrs(
  _typ,
  lambda _r, _d: _r.represent_mapping(
  f"!{_d.__class__.__name__}",
- # construct mapping, rather than calling attrs.asdict(),
- # to use yaml representers defined in this package for
- # "upstream" objects
  {_a.name: getattr(_d, _a.name) for _a in _d.__attrs_attrs__ if _a.init},
  ),
  )
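The representer registered above serializes an attrs-defined class as a mapping tagged with the class name, keeping only attributes declared with ``init=True``. A self-contained sketch of the same round-trip pattern, using a hypothetical ``Margin`` class rather than mergeron's ``yamelize_attrs`` (whose constructor half is outside this hunk):

.. code:: python

    import io

    import attrs
    from ruamel import yaml

    this_yaml = yaml.YAML(typ="rt")


    @attrs.frozen
    class Margin:
        source: str
        value: float
        scaled: float = attrs.field(init=False, default=0.0)  # init=False: not serialized


    def yamelize(_typ: type) -> None:
        """Register a YAML representer/constructor pair for an attrs-defined class."""
        if not attrs.has(_typ):
            raise ValueError(f"Object {_typ} is not attrs-defined")
        this_yaml.representer.add_representer(
            _typ,
            lambda _r, _d: _r.represent_mapping(
                f"!{_d.__class__.__name__}",
                {_a.name: getattr(_d, _a.name) for _a in _d.__attrs_attrs__ if _a.init},
            ),
        )

        def _from_yaml(_c: yaml.constructor.RoundTripConstructor, _n: yaml.MappingNode) -> object:
            data_ = yaml.comments.CommentedMap()
            _c.construct_mapping(_n, maptyp=data_, deep=True)
            return _typ(**data_)

        this_yaml.constructor.add_constructor(f"!{_typ.__name__}", _from_yaml)


    yamelize(Margin)
    buf = io.StringIO()
    this_yaml.dump(Margin("industry average", 0.35), buf)
    print(buf.getvalue())                  # "!Margin" mapping with source and value only
    print(this_yaml.load(buf.getvalue()))  # Margin(source='industry average', value=0.35, scaled=0.0)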
@@ -113,12 +111,13 @@ def yamelize_attrs(

  @this_yaml.register_class
  class Enameled(enum.Enum):
- """Add YAML representer, constructor for enum.Enum"""
+ """Add YAML representer, constructor for enum.Enum."""

  @classmethod
  def to_yaml(
- cls, _r: yaml.representer.RoundTripRepresenter, _d: object[enum.EnumType]
+ cls, _r: yaml.representer.RoundTripRepresenter, _d: enum.Enum
  ) -> yaml.ScalarNode:
+ """Serialize enumerations by .name, not .value."""
  return _r.represent_scalar(
  f"!{super().__getattribute__(cls, '__name__')}", f"{_d.name}"
  )
@@ -126,8 +125,10 @@ class Enameled(enum.Enum):
  @classmethod
  def from_yaml(
  cls, _c: yaml.constructor.RoundTripConstructor, _n: yaml.ScalarNode
- ) -> object[enum.EnumType]:
- return super().__getattribute__(cls, _n.value)
+ ) -> enum.EnumType:
+ """Deserialize enumeration."""
+ retval: enum.EnumType = super().__getattribute__(cls, _n.value)
+ return retval


  @this_yaml.register_class
@@ -181,10 +182,7 @@ class RECForm(str, Enameled):
  @this_yaml.register_class
  @enum.unique
  class UPPAggrSelector(str, Enameled):
- """
- Aggregator for GUPPI and diversion ratio estimates.
-
- """
+ """Aggregator for GUPPI and diversion ratio estimates."""
  AVG = "average"
  CPA = "cross-product-share weighted average"
  CPA = "cross-product-share weighted average"
{mergeron-2025.739319.3 → mergeron-2025.739341.8}/src/mergeron/core/__init__.py
@@ -1,3 +1,5 @@
+ """Constants, types, objects and functions used within this sub-package."""
+
  from __future__ import annotations

  from collections.abc import Mapping
@@ -8,6 +10,7 @@ from typing import Any
  import mpmath # type: ignore
  import numpy as np
  from attrs import cmp_using, field, frozen
+ from numpy.random import PCG64DXSM

  from .. import ( # noqa: TID252
  VERSION,
@@ -20,13 +23,15 @@ from .. import ( # noqa: TID252
  __version__ = VERSION


+ DEFAULT_BITGENERATOR = PCG64DXSM
+
  type MPFloat = mpmath.ctx_mp_python.mpf
- type MPMatrix = mpmath.matrix # type: ignore
+ type MPMatrix = mpmath.matrices.matrices._matrix


  @frozen
  class GuidelinesBoundary:
- """Output of a Guidelines boundary function."""
+ """Represents Guidelines boundary analytically."""
  coordinates: ArrayDouble
  """Market-share pairs as Cartesian coordinates of points on the boundary."""
32
37
  """Market-share pairs as Cartesian coordinates of points on the boundary."""
@@ -35,8 +40,19 @@ class GuidelinesBoundary:
  """Area under the boundary."""


+ @frozen
+ class GuidelinesBoundaryCallable:
+ """A function to generate Guidelines boundary points, along with area and knot."""
+
+ boundary_function: Callable[[ArrayDouble], ArrayDouble]
+ area: float
+ s_naught: float = 0
+
+
  @frozen
  class INVTableData:
+ """Represents individual table of FTC merger investigations data."""
+
  industry_group: str
  additional_evidence: str
  data_array: ArrayBIGINT = field(eq=cmp_using(eq=np.array_equal))
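``DEFAULT_BITGENERATOR``, set a few hunks above, is NumPy's ``PCG64DXSM`` bit generator. Independently of how mergeron's data-generation code consumes it (not shown in this diff), the bit generator plugs into ``numpy.random.Generator`` as follows; the seed and the Dirichlet draw are purely illustrative:

.. code:: python

    # Standalone NumPy usage of the PCG64DXSM bit generator named above.
    import numpy as np
    from numpy.random import PCG64DXSM, Generator, SeedSequence

    rng = Generator(PCG64DXSM(SeedSequence(739341)))  # hypothetical seed
    shares = rng.dirichlet(np.ones(3), size=2)        # e.g. market-share draws that sum to 1
    print(shares.sum(axis=1))                         # [1. 1.]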
@@ -45,7 +61,7 @@ class INVTableData:
  type INVData = MappingProxyType[
  str, MappingProxyType[str, MappingProxyType[str, INVTableData]]
  ]
- type INVData_in = Mapping[str, Mapping[str, Mapping[str, INVTableData]]]
+ type INVData_in = dict[str, dict[str, dict[str, INVTableData]]]

  yamelize_attrs(INVTableData)

@@ -91,14 +107,14 @@ _, _ = (
  def _dict_from_mapping(_p: Mapping[Any, Any], /) -> dict[Any, Any]:
  retval: dict[Any, Any] = {}
- for _k, _v in _p.items(): # for subit in it:
+ for _k, _v in _p.items():
  retval |= {_k: _dict_from_mapping(_v)} if isinstance(_v, Mapping) else {_k: _v}
  return retval


  def _mappingproxy_from_mapping(_p: Mapping[Any, Any], /) -> MappingProxyType[Any, Any]:
  retval: dict[Any, Any] = {}
- for _k, _v in _p.items(): # for subit in it:
+ for _k, _v in _p.items():
  retval |= (
  {_k: _mappingproxy_from_mapping(_v)}
  if isinstance(_v, Mapping)
  else {_k: _v}