mergeron 2024.738973.0-py3-none-any.whl → 2024.739079.10-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of mergeron might be problematic.

Files changed (37)
  1. mergeron/__init__.py +28 -3
  2. mergeron/core/__init__.py +2 -77
  3. mergeron/core/damodaran_margin_data.py +66 -52
  4. mergeron/core/excel_helper.py +39 -37
  5. mergeron/core/ftc_merger_investigations_data.py +66 -35
  6. mergeron/core/guidelines_boundaries.py +261 -234
  7. mergeron/core/guidelines_boundary_functions.py +182 -27
  8. mergeron/core/guidelines_boundary_functions_extra.py +17 -14
  9. mergeron/core/proportions_tests.py +2 -4
  10. mergeron/core/pseudorandom_numbers.py +6 -11
  11. mergeron/data/__init__.py +3 -0
  12. mergeron/data/damodaran_margin_data.xls +0 -0
  13. mergeron/data/damodaran_margin_data_dict.msgpack +0 -0
  14. mergeron/{jinja_LaTex_templates/setup_tikz_tables.tex.jinja2 → data/jinja2_LaTeX_templates/setup_tikz_tables.tex} +45 -50
  15. mergeron/demo/__init__.py +3 -0
  16. mergeron/demo/visualize_empirical_margin_distribution.py +88 -0
  17. mergeron/ext/__init__.py +2 -4
  18. mergeron/ext/tol_colors.py +3 -3
  19. mergeron/gen/__init__.py +53 -46
  20. mergeron/gen/_data_generation_functions.py +28 -93
  21. mergeron/gen/data_generation.py +20 -24
  22. mergeron/gen/{investigations_stats.py → enforcement_stats.py} +59 -57
  23. mergeron/gen/market_sample.py +6 -10
  24. mergeron/gen/upp_tests.py +29 -26
  25. mergeron-2024.739079.10.dist-info/METADATA +109 -0
  26. mergeron-2024.739079.10.dist-info/RECORD +36 -0
  27. mergeron/core/InCommon RSA Server CA cert chain.pem +0 -68
  28. mergeron-2024.738973.0.dist-info/METADATA +0 -108
  29. mergeron-2024.738973.0.dist-info/RECORD +0 -32
  30. /mergeron/{core → data}/ftc_invdata.msgpack +0 -0
  31. /mergeron/{jinja_LaTex_templates → data/jinja2_LaTeX_templates}/clrrate_cis_summary_table_template.tex.jinja2 +0 -0
  32. /mergeron/{jinja_LaTex_templates → data/jinja2_LaTeX_templates}/ftcinvdata_byhhianddelta_table_template.tex.jinja2 +0 -0
  33. /mergeron/{jinja_LaTex_templates → data/jinja2_LaTeX_templates}/ftcinvdata_summary_table_template.tex.jinja2 +0 -0
  34. /mergeron/{jinja_LaTex_templates → data/jinja2_LaTeX_templates}/ftcinvdata_summarypaired_table_template.tex.jinja2 +0 -0
  35. /mergeron/{jinja_LaTex_templates → data/jinja2_LaTeX_templates}/mergeron.cls +0 -0
  36. /mergeron/{jinja_LaTex_templates → data/jinja2_LaTeX_templates}/mergeron_table_collection_template.tex.jinja2 +0 -0
  37. {mergeron-2024.738973.0.dist-info → mergeron-2024.739079.10.dist-info}/WHEEL +0 -0
mergeron/gen/data_generation.py CHANGED
@@ -5,31 +5,32 @@ Methods to generate data for analyzing merger enforcement policy.
 
  from __future__ import annotations
 
- from importlib.metadata import version
+ from typing import NamedTuple
 
  import numpy as np
  from numpy.random import SeedSequence
  from numpy.typing import NDArray
 
- from .. import _PKG_NAME, RECConstants  # noqa: TID252
+ from .. import VERSION, RECConstants  # noqa: TID252
  from . import (
      EMPTY_ARRAY_DEFAULT,
      FM2Constants,
      MarketDataSample,
      MarketSpec,
-     PRIConstants,
+     PriceConstants,
      SHRConstants,
      SSZConstants,
  )
- from ._data_generation_functions import (
-     _gen_market_shares_dirichlet,  # noqa: F401 easter-egg for external modules
-     _gen_market_shares_uniform,  # noqa: F401 easter-egg for external modules
-     _gen_pcm_data,
-     _gen_price_data,
-     _gen_share_data,
- )
+ from ._data_generation_functions import _gen_pcm_data, _gen_price_data, _gen_share_data
+
+ __version__ = VERSION
+
 
- __version__ = version(_PKG_NAME)
+ class SeedSequenceData(NamedTuple):
+     mktshr_rng_seed_seq: SeedSequence
+     pcm_rng_seed_seq: SeedSequence
+     fcount_rng_seed_seq: SeedSequence | None
+     pr_rng_seed_seq: SeedSequence | None
 
 
  def gen_market_sample(
@@ -53,7 +54,7 @@ def gen_market_sample(
      2.) price-cost margins
      3.) firm-counts, from :code:`[2, 2 + len(firm_counts_weights)]`,
          weighted by :code:`firm_counts_weights`, where relevant
-     4.) prices, if :code:`price_spec == PRIConstants.ZERO`.
+     4.) prices, if :code:`price_spec == PriceConstants.ZERO`.
 
      Parameters
      ----------
@@ -74,8 +75,6 @@
 
      """
 
-     _mkt_sample_spec = _mkt_sample_spec or MarketSpec()
-
      _recapture_form = _mkt_sample_spec.share_spec.recapture_form
      _recapture_rate = _mkt_sample_spec.share_spec.recapture_rate
      _dist_type_mktshr = _mkt_sample_spec.share_spec.dist_type
@@ -141,9 +140,9 @@
      # Generate margin data
      _pcm_data = _gen_pcm_data(
          _mktshr_array[:, :2],
-         _mkt_sample_spec,
          _price_array,
          _aggregate_purchase_prob,
+         _mkt_sample_spec,
          _pcm_rng_seed_seq,
          nthreads,
      )
@@ -186,14 +185,14 @@
  def parse_seed_seq_list(
      _sseq_list: list[SeedSequence] | None,
      _mktshr_dist_type: SHRConstants,
-     _price_spec: PRIConstants,
+     _price_spec: PriceConstants,
      /,
- ) -> tuple[SeedSequence, SeedSequence, SeedSequence | None, SeedSequence | None]:
+ ) -> SeedSequenceData:
      """Initialize RNG seed sequences to ensure independence of distinct random streams."""
      _fcount_rng_seed_seq: SeedSequence | None = None
      _pr_rng_seed_seq: SeedSequence | None = None
 
-     if _price_spec == PRIConstants.ZERO:
+     if _price_spec == PriceConstants.ZERO:
          _pr_rng_seed_seq = _sseq_list.pop() if _sseq_list else SeedSequence(pool_size=8)
 
      if _mktshr_dist_type == SHRConstants.UNI:
@@ -212,11 +211,8 @@ def parse_seed_seq_list(
          else (SeedSequence(pool_size=8) for _ in range(_seed_count))
      )
 
-     return (
-         _mktshr_rng_seed_seq,
-         _pcm_rng_seed_seq,
-         _fcount_rng_seed_seq,
-         _pr_rng_seed_seq,
+     return SeedSequenceData(
+         _mktshr_rng_seed_seq, _pcm_rng_seed_seq, _fcount_rng_seed_seq, _pr_rng_seed_seq
      )
 
 
@@ -230,7 +226,7 @@ def gen_divr_array(
      """
      Given merging-firm shares and related parameters, return diverion ratios.
 
-     If recapture is specified as "Outside-in" (RECConstants.OUTIN), then the
+     If recapture is specified as :attr:`mergeron.RECConstants.OUTIN`, then the
      choice-probability for the outside good must be supplied.
 
      Parameters
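
Note on the data_generation.py changes above: parse_seed_seq_list now returns the new SeedSequenceData NamedTuple instead of a bare 4-tuple. Because a NamedTuple still unpacks positionally, existing callers are unaffected, while new code gains field access by name. A minimal standalone sketch of that pattern (the example values below are illustrative, not package code):

    from typing import NamedTuple
    from numpy.random import SeedSequence

    class SeedSequenceData(NamedTuple):
        mktshr_rng_seed_seq: SeedSequence
        pcm_rng_seed_seq: SeedSequence
        fcount_rng_seed_seq: SeedSequence | None
        pr_rng_seed_seq: SeedSequence | None

    seeds = SeedSequenceData(SeedSequence(pool_size=8), SeedSequence(pool_size=8), None, None)

    # Positional unpacking works exactly as it did with the old tuple return value ...
    mktshr_seed, pcm_seed, fcount_seed, pr_seed = seeds
    # ... and attribute access is now available as well.
    child_seeds = seeds.mktshr_rng_seed_seq.spawn(2)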
mergeron/gen/{investigations_stats.py → enforcement_stats.py} CHANGED
@@ -1,5 +1,5 @@
  """
- Methods to format and print summary data on merger enforcement patterns.
+ Methods to format and print summary statistics on merger enforcement patterns.
 
  """
 
@@ -7,7 +7,7 @@ import enum
  import shutil
  import subprocess
  from collections.abc import Mapping, Sequence
- from importlib.metadata import version
+ from importlib import resources
  from pathlib import Path
  from types import SimpleNamespace
 
@@ -17,12 +17,12 @@ from jinja2 import Environment, FileSystemLoader, Template, select_autoescape
  from numpy.typing import NDArray
  from scipy.interpolate import interp1d  # type: ignore
 
- from .. import _PKG_NAME, DATA_DIR  # noqa: TID252
+ from .. import _PKG_NAME, DATA_DIR, VERSION  # noqa: TID252
  from ..core import ftc_merger_investigations_data as fid  # noqa: TID252
  from ..core.proportions_tests import propn_ci  # noqa: TID252
- from . import TF, TI, INVResolution
+ from . import INVResolution
 
- __version__ = version(_PKG_NAME)
+ __version__ = VERSION
 
 
  @enum.unique
@@ -108,38 +108,42 @@ class StatsContainer(SimpleNamespace):
 
  # Define the latex jinja environment
  # http://eosrei.net/articles/2015/11/latex-templates-python-and-jinja2-generate-pdfs
- latex_jinja_env = Environment(
-     block_start_string=R"((*",
-     block_end_string="*))",
-     variable_start_string=R"\JINVAR{",
-     variable_end_string="}",
-     comment_start_string=R"((#",  # r'#{',
-     comment_end_string=R"#))",  # '}',
-     line_statement_prefix="##",
-     line_comment_prefix="%#",
-     trim_blocks=True,
-     lstrip_blocks=True,
-     autoescape=select_autoescape(disabled_extensions=("tex.jinja2",)),
-     loader=FileSystemLoader(Path(__file__).parents[1] / "jinja_LaTex_templates"),
- )
+ with resources.as_file(
+     resources.files(f"{_PKG_NAME}.data.jinja2_LaTeX_templates")
+ ) as _tmpl_folder:
+     latex_jinja_env = Environment(
+         block_start_string=R"((*",
+         block_end_string="*))",
+         variable_start_string=R"\JINVAR{",
+         variable_end_string="}",
+         comment_start_string=R"((#",  # r'#{',
+         comment_end_string=R"#))",  # '}',
+         line_statement_prefix="##",
+         line_comment_prefix="%#",
+         trim_blocks=True,
+         lstrip_blocks=True,
+         autoescape=select_autoescape(disabled_extensions=("tex.jinja2",)),
+         loader=FileSystemLoader(_tmpl_folder),
+     )
 
  # Place files related to rendering latex in output data directory
  if not (_out_path := DATA_DIR.joinpath(f"{_PKG_NAME}.cls")).is_file():
-     shutil.copyfile(
-         Path(__file__).parents[1].joinpath("jinja_LaTex_templates", "mergeron.cls"),
-         _out_path,
-     )
+     with resources.as_file(
+         resources.files(f"{_PKG_NAME}.data.jinja2_LaTeX_templates").joinpath(
+             "{_PKG_NAME}.cls"
+         )
+     ) as _in_path:
+         shutil.copy2(_in_path, _out_path)
 
 
  if not (_DOTTEX := DATA_DIR / Rf"{_PKG_NAME}_TikZTableSettings.tex").is_file():
      # Write to dottex
-     with _DOTTEX.open("w", encoding="UTF-8") as _table_helper_dottex:
-         _table_helper_dottex.write(
-             latex_jinja_env.get_template("setup_tikz_tables.tex.jinja2").render(
-                 tmpl_data=StatsContainer()
-             )
+     with resources.as_file(
+         resources.files(f"{_PKG_NAME}.data.jinja2_LaTeX_templates").joinpath(
+             "setup_tikz_tables.tex"
          )
-         print("\n", file=_table_helper_dottex)
+     ) as _tex_path:
+         shutil.copy2(_tex_path, _DOTTEX)
 
 
  # Parameters and functions to interpolate selected HHI and ΔHHI values
@@ -172,7 +176,7 @@ HMG_PRESUMPTION_ZONE_DICT = {
  }
 
  ZONE_VALS = np.unique(
-     np.row_stack([
+     np.vstack([
          tuple(HMG_PRESUMPTION_ZONE_DICT[_k].values())
          for _k in HMG_PRESUMPTION_ZONE_DICT
      ]),
@@ -375,7 +379,7 @@ def table_no_lku(
      /,
  ) -> str:
      if _table_ind_group not in (
-         _igl := [_data_array_dict_sub[_v].ind_grp for _v in _data_array_dict_sub]
+         _igl := [_data_array_dict_sub[_v].industry_group for _v in _data_array_dict_sub]
      ):
          raise ValueError(
              f"Invalid value for industry group, {f'"{_table_ind_group}"'}."
@@ -386,19 +390,17 @@
          _t
          for _t in _data_array_dict_sub
          if all((
-             _data_array_dict_sub[_t].ind_grp == _table_ind_group,
-             _data_array_dict_sub[_t].evid_cond == _table_evid_cond,
+             _data_array_dict_sub[_t].industry_group == _table_ind_group,
+             _data_array_dict_sub[_t].additional_evidence == _table_evid_cond,
          ))
      )
 
      return _tno
 
 
- def invres_cnts_byfirmcount(
-     _cnts_array: NDArray[np.integer[TI]], /
- ) -> NDArray[np.int64]:
+ def invres_cnts_byfirmcount(_cnts_array: NDArray[np.int64], /) -> NDArray[np.int64]:
      _ndim_in = 1
-     return np.row_stack([
+     return np.vstack([
          np.concatenate([
              (f,),
              np.einsum("ij->j", _cnts_array[_cnts_array[:, 0] == f][:, _ndim_in:]),
@@ -407,9 +409,9 @@
      ])
 
 
- def invres_cnts_bydelta(_cnts_array: NDArray[np.integer[TI]], /) -> NDArray[np.int64]:
+ def invres_cnts_bydelta(_cnts_array: NDArray[np.int64], /) -> NDArray[np.int64]:
      _ndim_in = 2
-     return np.row_stack([
+     return np.vstack([
          np.concatenate([
              (f,),
              np.einsum("ij->j", _cnts_array[_cnts_array[:, 1] == f][:, _ndim_in:]),
@@ -418,9 +420,7 @@ def invres_cnts_bydelta(_cnts_array: NDArray[np.i
      ])
 
 
- def invres_cnts_byconczone(
-     _cnts_array: NDArray[np.integer[TI]], /
- ) -> NDArray[np.int64]:
+ def invres_cnts_byconczone(_cnts_array: NDArray[np.int64], /) -> NDArray[np.int64]:
      # Prepare to tag clearance stats by presumption zone
      _hhi_zone_post_ranged = hhi_zone_post_ranger(_cnts_array[:, 0] / 1e4)
      _hhi_delta_ranged = hhi_delta_ranger(_cnts_array[:, 1] / 1e4)
@@ -453,7 +453,7 @@
 
              _conc_test = _level_test & _delta_test
 
-             _cnts_byhhipostanddelta = np.row_stack((
+             _cnts_byhhipostanddelta = np.vstack((
                  _cnts_byhhipostanddelta,
                  np.array(
                      (
@@ -475,7 +475,7 @@
          ])
      ).prod(axis=1) == 1
 
-     _cnts_byconczone = np.row_stack((
+     _cnts_byconczone = np.vstack((
          _cnts_byconczone,
          np.concatenate(
              (
@@ -492,7 +492,7 @@
 
 
  def latex_tbl_invres_stats_1dim(
-     _inparr: NDArray[np.floating[TF] | np.integer[TI]],
+     _inparr: NDArray[np.float64 | np.int64],
      _totals_row: int | None = None,
      /,
      *,
@@ -527,7 +527,7 @@
      if sort_order == SortSelector.REV:
          _inparr = _inparr[::-1]
 
-     _inparr = np.row_stack((_inparr, _in_totals_row))
+     _inparr = np.vstack((_inparr, _in_totals_row))
 
      _stats_hdr_list, _stats_dat_list = [], []
      for _stats_row in _inparr:
@@ -544,7 +544,7 @@
 
 
  def latex_tbl_invres_stats_byzone(
-     _inparr: NDArray[np.floating[TF] | np.integer[TI]],
+     _inparr: NDArray[np.float64 | np.int64],
      _totals_row: int | None = None,
      /,
      *,
@@ -559,7 +559,7 @@
      _zone_str_keys = _zone_str_keys[:-1][::-1] + [_zone_str_keys[-1]]
 
      if _totals_row is None:
-         _inparr = np.row_stack((
+         _inparr = np.vstack((
              _inparr,
              np.concatenate((
                  [fid.TTL_KEY, -1, -1],
@@ -611,8 +611,8 @@
 
 
  def _stats_formatted_row(
-     _stats_row_cnt: NDArray[np.integer[TI]],
-     _stats_row_tot: NDArray[np.integer[TI]],
+     _stats_row_cnt: NDArray[np.int64],
+     _stats_row_tot: NDArray[np.int64],
      _return_type_sel: StatsReturnSelector,
      /,
  ) -> list[list[str]]:
@@ -691,19 +691,21 @@ def render_table_pdf(
      )
      print("\n", file=_table_coll_file)
 
-     _run_rc = subprocess.run(
-         f"latexmk -f -quiet -synctex=0 -interaction=nonstopmode -file-line-error -pdflua {_table_coll_path}".split(),  # noqa: S603
+     _run_rc = subprocess.run(  # noqa: S603
+         f"latexmk -f -quiet -synctex=0 -interaction=nonstopmode -file-line-error -pdflua {_table_coll_path}".split(),
          check=True,
          cwd=DATA_DIR,
      )
      if _run_rc:
-         subprocess.run(
-             "latexmk -quiet -c".split(),  # noqa: S603
-             check=True,
-             cwd=DATA_DIR,
-         )
+         subprocess.run("latexmk -quiet -c".split(), check=True, cwd=DATA_DIR)  # noqa: S603
      del _run_rc
 
      print(
          f"Tables rendered to path, {f"{Path(DATA_DIR / _table_coll_path).with_suffix(".pdf")}"}"
      )
+
+
+ if __name__ == "__main__":
+     print(
+         "This module provides methods to format and print summary statistics on merger enforcement patterns.."
+     )
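
Note on the template handling above: the Jinja2/LaTeX templates now ship inside the package (under mergeron/data/jinja2_LaTeX_templates) and are located with importlib.resources instead of paths built from __file__, which also works when the package is installed from a wheel or zip. A standalone sketch of the same access pattern (it assumes _PKG_NAME resolves to "mergeron" and copies into the current directory purely for illustration):

    import shutil
    from importlib import resources

    # Resolve a packaged data file to a real filesystem path for the duration of the
    # context manager, then copy it into a working directory.
    cls_resource = resources.files("mergeron.data.jinja2_LaTeX_templates").joinpath("mergeron.cls")
    with resources.as_file(cls_resource) as src_path:
        shutil.copy2(src_path, "./mergeron.cls")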
mergeron/gen/market_sample.py CHANGED
@@ -5,18 +5,16 @@ Methods to generate data for analyzing merger enforcement policy.
 
  from __future__ import annotations
 
- from importlib.metadata import version
-
  from attrs import define
  from numpy.random import SeedSequence
 
- from .. import _PKG_NAME  # noqa: TID252
- from ..core import guidelines_boundaries as gbl  # noqa: TID252
+ from .. import VERSION  # noqa: TID252
+ from ..core.guidelines_boundaries import HMGThresholds  # noqa: TID252
  from . import MarketSpec, UPPTestRegime
  from .data_generation import gen_market_sample
  from .upp_tests import SaveData, invres_cnts, save_data_to_hdf5, sim_invres_cnts_ll
 
- __version__ = version(_PKG_NAME)
+ __version__ = VERSION
 
 
  @define(slots=False)
@@ -50,7 +48,7 @@ class MarketSample(MarketSpec):
 
      def estimate_invres_counts(
          self,
-         _invres_parm_vec: gbl.HMGThresholds,
+         _invres_parm_vec: HMGThresholds,
          _upp_test_regime: UPPTestRegime,
          /,
          *,
@@ -73,7 +71,5 @@
          self.invres_counts = invres_cnts(
              self.data, _invres_parm_vec, _upp_test_regime
          )
-         if save_data_to_file:
-             save_data_to_hdf5(
-                 self.invres_counts, save_data_to_file=save_data_to_file
-             )
+         if save_data_to_file:
+             save_data_to_hdf5(self.invres_counts, save_data_to_file=save_data_to_file)
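
Note on a change repeated across these modules: __version__ = version(_PKG_NAME), an importlib.metadata lookup against the installed distribution performed at every import, is replaced by a VERSION constant imported from the package root. A sketch contrasting the two patterns (shown with the literal distribution name "mergeron" in place of the package's _PKG_NAME constant):

    # Old pattern: read the version from installed-distribution metadata at import time;
    # this raises PackageNotFoundError when the code runs from an uninstalled checkout.
    from importlib.metadata import version
    __version__ = version("mergeron")

    # New pattern: a constant defined once in mergeron/__init__.py and imported everywhere
    # else (inside the package the import is written `from .. import VERSION`).
    from mergeron import VERSION
    __version__ = VERSION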
mergeron/gen/upp_tests.py CHANGED
@@ -1,24 +1,22 @@
  """
- Methods to estimate intrinsic clearnace rates and intrinsic enforcement rates
+ Methods to compute intrinsic clearance rates and intrinsic enforcement rates
  from generated market data.
 
  """
 
  from collections.abc import Sequence
  from contextlib import suppress
- from importlib.metadata import version
  from pathlib import Path
  from typing import Literal, TypeAlias, TypedDict
 
  import numpy as np
  import tables as ptb  # type: ignore
+ from icecream import ic  # type: ignore
  from joblib import Parallel, cpu_count, delayed  # type: ignore
  from numpy.random import SeedSequence
  from numpy.typing import NDArray
 
- from mergeron.core.pseudorandom_numbers import TF, TI
-
- from .. import _PKG_NAME, RECConstants, UPPAggrSelector  # noqa: TID252
+ from .. import VERSION, RECConstants, UPPAggrSelector  # noqa: TID252
  from ..core import guidelines_boundaries as gbl  # noqa: TID252
  from . import (
      EMPTY_ARRAY_DEFAULT,
@@ -31,10 +29,9 @@ from . import (
      UPPTestsRaw,
  )
  from . import data_generation as dgl
- from . import investigations_stats as isl
-
- __version__ = version(_PKG_NAME)
+ from . import enforcement_stats as esl
 
+ __version__ = VERSION
 
  ptb.parameters.MAX_NUMEXPR_THREADS = 8
  ptb.parameters.MAX_BLOSC_THREADS = 4
@@ -45,11 +42,11 @@ SaveData: TypeAlias = Literal[False] | tuple[Literal[True], ptb.File, ptb.Group]
  class INVRESCntsArgs(TypedDict, total=False):
      "Keyword arguments of function, :code:`sim_invres_cnts`"
 
-     saved_array_name_suffix: str
-     save_data_to_file: SaveData
      sample_size: int
      seed_seq_list: list[SeedSequence] | None
      nthreads: int
+     save_data_to_file: SaveData
+     saved_array_name_suffix: str
 
 
  def sim_invres_cnts_ll(
@@ -58,11 +55,11 @@ def sim_invres_cnts_ll(
      _sim_test_regime: UPPTestRegime,
      /,
      *,
-     saved_array_name_suffix: str = "",
-     save_data_to_file: SaveData = False,
      sample_size: int = 10**6,
      seed_seq_list: list[SeedSequence] | None = None,
      nthreads: int = 16,
+     save_data_to_file: SaveData = False,
+     saved_array_name_suffix: str = "",
  ) -> UPPTestsCounts:
      """A function to parallelize data-generation and testing
 
@@ -171,11 +168,11 @@ def sim_invres_cnts(
      _sim_test_regime: UPPTestRegime,
      /,
      *,
-     saved_array_name_suffix: str = "",
-     save_data_to_file: SaveData = False,
      sample_size: int = 10**6,
      seed_seq_list: list[SeedSequence] | None = None,
      nthreads: int = 16,
+     save_data_to_file: SaveData = False,
+     saved_array_name_suffix: str = "",
  ) -> UPPTestsCounts:
      # Generate market data
      _market_data_sample = dgl.gen_market_sample(
@@ -235,7 +232,7 @@ def invres_cnts(
      for _firm_cnt in 2 + np.arange(_max_firm_count):
          _firm_count_test = _fcounts == _firm_cnt
 
-         _invres_cnts_sim_byfirmcount_array = np.row_stack((
+         _invres_cnts_sim_byfirmcount_array = np.vstack((
              _invres_cnts_sim_byfirmcount_array,
              np.array([
                  _firm_cnt,
@@ -257,12 +254,12 @@
      _invres_cnts_sim_byfirmcount_array[0] = 2
 
      # Clearance/enfrocement counts --- by delta
-     _hhi_delta_ranged = isl.hhi_delta_ranger(_hhi_delta)
+     _hhi_delta_ranged = esl.hhi_delta_ranger(_hhi_delta)
      _invres_cnts_sim_bydelta_array = -1 * np.ones(_stats_rowlen, np.int64)
-     for _hhi_delta_lim in isl.HHI_DELTA_KNOTS[:-1]:
+     for _hhi_delta_lim in esl.HHI_DELTA_KNOTS[:-1]:
          _hhi_delta_test = _hhi_delta_ranged == _hhi_delta_lim
 
-         _invres_cnts_sim_bydelta_array = np.row_stack((
+         _invres_cnts_sim_bydelta_array = np.vstack((
              _invres_cnts_sim_bydelta_array,
              np.array([
                  _hhi_delta_lim,
@@ -280,13 +277,13 @@
 
      # Clearance/enfrocement counts --- by zone
      try:
-         _hhi_zone_post_ranged = isl.hhi_zone_post_ranger(_hhi_post)
+         _hhi_zone_post_ranged = esl.hhi_zone_post_ranger(_hhi_post)
      except ValueError as _err:
-         print(_hhi_post)
+         ic(_hhi_post)
          raise _err
 
      _stats_byconczone_sim = -1 * np.ones(_stats_rowlen + 1, np.int64)
-     for _hhi_zone_post_knot in isl.HHI_POST_ZONE_KNOTS[:-1]:
+     for _hhi_zone_post_knot in esl.HHI_POST_ZONE_KNOTS[:-1]:
          _level_test = _hhi_zone_post_ranged == _hhi_zone_post_knot
 
          for _hhi_zone_delta_knot in [0, 100, 200]:
@@ -298,7 +295,7 @@
 
              _conc_test = _level_test & _delta_test
 
-             _stats_byconczone_sim = np.row_stack((
+             _stats_byconczone_sim = np.vstack((
                  _stats_byconczone_sim,
                  np.array([
                      _hhi_zone_post_knot,
@@ -313,7 +310,7 @@
                  ]),
              ))
 
-     _invres_cnts_sim_byconczone_array = isl.invres_cnts_byconczone(
+     _invres_cnts_sim_byconczone_array = esl.invres_cnts_byconczone(
          _stats_byconczone_sim[1:]
      )
      del _stats_byconczone_sim
@@ -472,11 +469,11 @@ def initialize_hd5(
          _h5_path.unlink()
      _h5_file = ptb.open_file(_h5_path, mode="w", title=_h5_title)
      _save_data_to_file: tuple[Literal[True], ptb.File, str] = (True, _h5_file, "/")
-     _next_subgroup_name = "invres_{}_{}_{}_{}".format(
+     _next_subgroup_name_root = "invres_{}_{}_{}_{}".format(
          _hmg_pub_year,
          *(getattr(_test_regime, _f.name).name for _f in _test_regime.__attrs_attrs__),
      )
-     return _save_data_to_file, _next_subgroup_name
+     return _save_data_to_file, _next_subgroup_name_root
 
 
  def save_data_to_hdf5(
@@ -504,7 +501,7 @@
 
 
  def save_array_to_hdf5(
-     _array_obj: NDArray[np.floating[TF] | np.integer[TI] | np.bool_],
+     _array_obj: NDArray[np.float64 | np.int64 | np.bool_],
      _array_name: str,
      _h5_group: ptb.Group,
      _h5_file: ptb.File,
@@ -525,3 +522,9 @@
          filters=ptb.Filters(complevel=3, complib="blosc:lz4hc", fletcher32=True),
      )
      _h5_array[:] = _array_obj
+
+
+ if __name__ == "__main__":
+     print(
+         "This module defines classes with methods for generating UPP test arrays and UPP test-counts arrays on given data."
+     )
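
Note on the np.row_stack → np.vstack substitutions that appear throughout this diff: np.row_stack has long been a plain alias for np.vstack, and NumPy has deprecated the alias, so switching the call sites changes nothing numerically. A small standalone illustration (the arrays are made up for the example):

    import numpy as np

    counts_by_zone = np.array([[2500, 100, 7, 3]])
    totals_row = np.array([2500, 200, 12, 5])

    # np.vstack promotes 1-D inputs to rows and stacks along axis 0,
    # exactly as the deprecated np.row_stack alias did.
    stacked = np.vstack((counts_by_zone, totals_row))
    print(stacked.shape)  # (2, 4)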