mergeron 2025.739290.5__py3-none-any.whl → 2025.739290.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mergeron might be problematic. Click here for more details.

mergeron/data/__init__.py CHANGED
@@ -1,3 +1,54 @@
1
- from .. import VERSION # noqa: TID252
1
+ """
2
+ Data useful for empirical analysis of merger enforcement policy
3
+
4
+ These data are processed for further analysis within relevant
5
+ submodules of the parent package. Thus, direct access is
6
+ unnecessary in routine use of this package.
7
+ """
8
+
9
+ from importlib import resources
10
+
11
+ from .. import _PKG_NAME, VERSION # noqa: TID252
2
12
 
3
13
  __version__ = VERSION
14
+
15
+ data_resources = resources.files(f"{_PKG_NAME}.data")
16
+
17
+ DAMODARAN_MARGIN_WORKBOOK = data_resources / "damodaran_margin_data.xls"
18
+ """
19
+ Python object pointing to included copy of Prof. Damodaran's margin data
20
+
21
+ Only used as a fallback, in case direct download from source fails.
22
+
23
+ NOTES
24
+ -----
25
+ Source data are from Prof. Aswath Damodaran, Stern School of Business, NYU; available online
26
+ at https://pages.stern.nyu.edu/~adamodar/pc/datasets/margin.xls
27
+
28
+
29
+ Use as, for example:
30
+
31
+ .. code-block:: python
32
+
33
+ from mergeron.data import DAMODARAN_MARGIN_WORKBOOK
34
+
35
+ shutil.copy2(DAMODARAN_MARGIN_WORKBOOK, Path.home() / f"{DAMODARAN_MARGIN_WORKBOOK.name}")
36
+ """
37
+
38
+ FTC_MERGER_INVESTIGATIONS_DATA = data_resources / "ftc_merger_investigations_data.zip"
39
+ """
40
+ FTC merger investigations data published in 2004, 2007, 2008, and 2013
41
+
42
+ NOTES
43
+ -----
44
+ Raw data tables published by the FTC are loaded into a nested dictionary, organized by
45
+ data period, table type, and table number. Each table is stored as a numerical array
46
+ (:mod:`numpy` array), with additional attributes for the industry group and additional
47
+ evidence noted in the source data.
48
+
49
+ Data for additional data periods (time spans) not reported in the source data,
50
+ e.g., 2004-2011, are constructed by subtracting counts in the base data from counts
51
+ in the cumulative data, by table, for "enforced" mergers and "closed" mergers, when
52
+ the cumulative data for the longer period are consistent with the base data for
53
+ a sub-period.
54
+ """
@@ -13,9 +13,12 @@ from numpy.random import PCG64DXSM, Generator, SeedSequence
13
13
  from scipy import stats # type: ignore
14
14
 
15
15
  import mergeron.core.empirical_margin_distribution as emd
16
- from mergeron import DATA_DIR
16
+ from mergeron import WORK_DIR as PKG_WORK_DIR
17
17
  from mergeron.core.guidelines_boundary_functions import boundary_plot
18
18
 
19
+ WORK_DIR = globals().get("WORK_DIR", PKG_WORK_DIR)
20
+ """Redefined, in case the user defines WORK_DIR betweeen module imports."""
21
+
19
22
  SAMPLE_SIZE = 10**6
20
23
  BIN_COUNT = 25
21
24
  margin_data_obs, margin_data_wts, margin_data_stats = emd.margin_data_builder()
@@ -85,4 +88,4 @@ mgn_ax.set_xlabel("Price Cost Margin", fontsize=10)
85
88
  mgn_ax.set_ylabel("Relative Frequency", fontsize=10)
86
89
 
87
90
  mgn_fig.tight_layout()
88
- plt.savefig(DATA_DIR / f"{Path(__file__).stem}.pdf")
91
+ plt.savefig(WORK_DIR / f"{Path(__file__).stem}.pdf")
mergeron/gen/__init__.py CHANGED
@@ -11,9 +11,9 @@ import io
11
11
  from collections.abc import Sequence
12
12
  from operator import attrgetter
13
13
 
14
- import h5py
14
+ import h5py # type: ignore
15
15
  import numpy as np
16
- from attrs import Attribute, Converter, cmp_using, field, frozen, validators
16
+ from attrs import Attribute, Converter, cmp_using, field, frozen
17
17
  from numpy.random import SeedSequence
18
18
 
19
19
  from .. import ( # noqa: TID252
@@ -255,7 +255,7 @@ class ShareSpec:
255
255
  in published merger guidelines. Accordingly, the recapture ratio rounded to
256
256
  the nearest 5% is:
257
257
 
258
- * 0.85, **7-to-6 merger from symmetry**; US Guidelines, 1982, 1984, 1992, 2023
258
+ * 0.85, **7-to-6 merger from symmetry**; US Guidelines, 1992, 2023
259
259
  * 0.80, 5-to-4 merger from symmetry
260
260
  * 0.80, **5-to-4 merger to symmetry**; US Guidelines, 2010
261
261
 
@@ -470,7 +470,9 @@ class MarketSampleData:
470
470
 
471
471
  @aggregate_purchase_prob.default
472
472
  def __appd(_i: MarketSampleData) -> ArrayDouble:
473
- return np.nan * np.empty_like(_i.frmshr_array[:, :1], float)
473
+ retval: ArrayDouble = np.empty_like(_i.frmshr_array[:, :1], float)
474
+ retval.fill(np.nan)
475
+ return retval
474
476
 
475
477
  fcounts: ArrayINT = field(eq=cmp_using(np.array_equal))
476
478
  """Number of firms in market"""
@@ -488,14 +490,18 @@ class MarketSampleData:
488
490
 
489
491
  @nth_firm_share.default
490
492
  def __nfsd(_i: MarketSampleData) -> ArrayDouble:
491
- return np.nan * np.empty_like(_i.frmshr_array[:, :1], float)
493
+ retval: ArrayDouble = np.empty_like(_i.frmshr_array[:, :1], float)
494
+ retval.fill(np.nan)
495
+ return retval
492
496
 
493
497
  hhi_post: ArrayDouble = field(eq=cmp_using(np.array_equal))
494
498
  """Post-merger change in Herfindahl-Hirschmann Index (HHI)"""
495
499
 
496
500
  @hhi_post.default
497
501
  def __hpd(_i: MarketSampleData) -> ArrayDouble:
498
- return np.nan * np.empty_like(_i.frmshr_array[:, :1], float)
502
+ retval: ArrayDouble = np.empty_like(_i.frmshr_array[:, :1], float)
503
+ retval.fill(np.nan)
504
+ return retval
499
505
 
500
506
  def to_h5bin(self) -> bytes:
501
507
  """Save market sample data to HDF5 file."""
@@ -582,45 +588,39 @@ class INVResolution(str, Enameled):
582
588
  class UPPTestRegime:
583
589
  """Configuration for UPP tests."""
584
590
 
585
- resolution: INVResolution = field(
586
- kw_only=False,
587
- default=INVResolution.ENFT,
588
- validator=validators.in_([INVResolution.CLRN, INVResolution.ENFT]),
589
- )
590
- """Whether to test clearance, enforcement, or both."""
591
-
592
- guppi_aggregator: UPPAggrSelector = field(
593
- kw_only=False, default=UPPAggrSelector.MIN
594
- )
595
- """Aggregator for GUPPI test."""
596
-
597
- divr_aggregator: UPPAggrSelector = field(kw_only=False, default=UPPAggrSelector.MIN)
598
- """Aggregator for diversion ratio test."""
599
-
591
+ resolution: INVResolution = field(kw_only=False, default=INVResolution.ENFT)
592
+ """Whether to test clearance, enforcement."""
600
593
 
601
- @frozen
602
- class UPPTestsRaw:
603
- """Container for arrays marking test failures and successes
604
-
605
- A test success is a draw ("market") that meeets the
606
- specified test criterion, and a test failure is
607
- one that does not; test criteria are evaluated in
608
- :func:`enforcement_stats.gen_upp_arrays`.
609
- """
594
+ @resolution.validator
595
+ def _resvdtr(
596
+ _i: UPPTestRegime, _a: Attribute[INVResolution], _v: INVResolution
597
+ ) -> None:
598
+ if _v == INVResolution.BOTH:
599
+ raise ValueError(
600
+ "GUPPI test cannot be performed with both resolutions; only useful for reporting"
601
+ )
602
+ elif _v not in {INVResolution.CLRN, INVResolution.ENFT}:
603
+ raise ValueError(
604
+ f"Must be one of, {INVResolution.CLRN!r} or {INVResolution.ENFT!r}"
605
+ )
610
606
 
611
- guppi_test_simple: ArrayBoolean
612
- """True if GUPPI estimate meets criterion"""
607
+ guppi_aggregator: UPPAggrSelector = field(kw_only=False)
608
+ """Aggregator for GUPPI test."""
613
609
 
614
- guppi_test_compound: ArrayBoolean
615
- """True if both GUPPI estimate and diversion ratio estimate
616
- meet criterion
617
- """
610
+ @guppi_aggregator.default
611
+ def __gad(_i: UPPTestRegime) -> UPPAggrSelector:
612
+ return (
613
+ UPPAggrSelector.MIN
614
+ if _i.resolution == INVResolution.ENFT
615
+ else UPPAggrSelector.MAX
616
+ )
618
617
 
619
- cmcr_test: ArrayBoolean
620
- """True if CMCR estimate meets criterion"""
618
+ divr_aggregator: UPPAggrSelector = field(kw_only=False)
619
+ """Aggregator for diversion ratio test."""
621
620
 
622
- ipr_test: ArrayBoolean
623
- """True if IPR (partial price-simulation) estimate meets criterion"""
621
+ @divr_aggregator.default
622
+ def __dad(_i: UPPTestRegime) -> UPPAggrSelector:
623
+ return _i.guppi_aggregator
624
624
 
625
625
 
626
626
  @frozen
@@ -453,15 +453,6 @@ class MarketSample:
453
453
  )
454
454
 
455
455
  if not _ndt:
456
- # byte_stream = io.BytesIO()
457
- # with h5py.File(byte_stream, "w") as h5f:
458
- # for _a in self.dataset.__attrs_attrs__:
459
- # if all((
460
- # (_arr := getattr(self.dataset, _a.name)).any(),
461
- # not np.isnan(_arr).all(),
462
- # )):
463
- # h5f.create_dataset(_a.name, data=_arr, fletcher32=True)
464
-
465
456
  with (zpath / f"{name_root}_dataset.h5").open("wb") as _hfh:
466
457
  _hfh.write(self.dataset.to_h5bin())
467
458
 
@@ -490,10 +481,7 @@ class MarketSample:
490
481
  if _dt:
491
482
  with _dp.open("rb") as _hfh:
492
483
  object.__setattr__( # noqa: PLC2801
493
- market_sample_,
494
- "dataset",
495
- # MarketSampleData(**{_a: h5f[_a][:] for _a in h5f}),
496
- MarketSampleData.from_h5f(_hfh),
484
+ market_sample_, "dataset", MarketSampleData.from_h5f(_hfh)
497
485
  )
498
486
  if _et:
499
487
  object.__setattr__( # noqa: PLC2801
@@ -7,7 +7,7 @@ import enum
7
7
  from collections.abc import Mapping
8
8
 
9
9
  import numpy as np
10
- from scipy.interpolate import interp1d # type: ignore
10
+ from scipy.interpolate import make_interp_spline # type: ignore
11
11
 
12
12
  from .. import VERSION, ArrayBIGINT, Enameled, this_yaml # noqa: TID252
13
13
  from ..core import ftc_merger_investigations_data as fid # noqa: TID252
@@ -77,7 +77,7 @@ HHI_DELTA_KNOTS = np.array(
77
77
  )
78
78
  HHI_POST_ZONE_KNOTS = np.array([0, 1800, 2400, 10001], dtype=np.int64)
79
79
  hhi_delta_ranger, hhi_zone_post_ranger = (
80
- interp1d(_f / 1e4, _f, kind="previous", assume_sorted=True)
80
+ make_interp_spline(_f / 1e4, _f, k=0)
81
81
  for _f in (HHI_DELTA_KNOTS, HHI_POST_ZONE_KNOTS)
82
82
  )
83
83
 
@@ -256,11 +256,16 @@ def table_no_lku(
256
256
 
257
257
 
258
258
  def enf_cnts_byfirmcount(_cnts_array: ArrayBIGINT, /) -> ArrayBIGINT:
259
+ if not _cnts_array[:, 0].any():
260
+ return np.array([], int)
261
+
259
262
  ndim_in = 1
260
263
  return np.vstack([
261
264
  np.concatenate([
262
265
  (_i,),
263
- np.einsum("ij->j", _cnts_array[_cnts_array[:, 0] == _i][:, ndim_in:]),
266
+ np.einsum(
267
+ "ij->j", _cnts_array[_cnts_array[:, 0] == _i][:, ndim_in:], dtype=int
268
+ ),
264
269
  ])
265
270
  for _i in np.unique(_cnts_array[:, 0])
266
271
  ])
@@ -271,14 +276,16 @@ def enf_cnts_bydelta(_cnts_array: ArrayBIGINT, /) -> ArrayBIGINT:
271
276
  return np.vstack([
272
277
  np.concatenate([
273
278
  (_k,),
274
- np.einsum("ij->j", _cnts_array[_cnts_array[:, 1] == _k][:, ndim_in:]),
279
+ np.einsum(
280
+ "ij->j", _cnts_array[_cnts_array[:, 1] == _k][:, ndim_in:], dtype=int
281
+ ),
275
282
  ])
276
283
  for _k in HHI_DELTA_KNOTS[:-1]
277
284
  ])
278
285
 
279
286
 
280
287
  def enf_cnts_byconczone(_cnts_array: ArrayBIGINT, /) -> ArrayBIGINT:
281
- if not _cnts_array.any():
288
+ if not _cnts_array[:, 0].any() or np.isnan(_cnts_array[:, 0]).all():
282
289
  return np.array([], int)
283
290
  # Step 1: Tag and agg. from HHI-post and Delta to zone triple
284
291
  # NOTE: Although you could just map and not (partially) aggregate in this step,
@@ -315,7 +322,9 @@ def enf_cnts_byconczone(_cnts_array: ArrayBIGINT, /) -> ArrayBIGINT:
315
322
  np.array(
316
323
  (
317
324
  *zone_val,
318
- *np.einsum("ij->j", _cnts_array[:, _ndim_in:][conc_test]),
325
+ *np.einsum(
326
+ "ij->j", _cnts_array[:, _ndim_in:][conc_test], dtype=int
327
+ ),
319
328
  ),
320
329
  dtype=int,
321
330
  ),
@@ -338,7 +347,9 @@ def enf_cnts_byconczone(_cnts_array: ArrayBIGINT, /) -> ArrayBIGINT:
338
347
  (
339
348
  zone_val,
340
349
  np.einsum(
341
- "ij->j", cnts_byhhipostanddelta[hhi_zone_test][:, _nkeys:]
350
+ "ij->j",
351
+ cnts_byhhipostanddelta[hhi_zone_test][:, _nkeys:],
352
+ dtype=int,
342
353
  ),
343
354
  ),
344
355
  dtype=int,
mergeron/gen/upp_tests.py CHANGED
@@ -21,13 +21,7 @@ from .. import ( # noqa
21
21
  UPPAggrSelector,
22
22
  )
23
23
  from ..core import guidelines_boundaries as gbl # noqa: TID252
24
- from . import (
25
- INVResolution,
26
- MarketSampleData,
27
- UPPTestRegime,
28
- UPPTestsCounts,
29
- UPPTestsRaw,
30
- )
24
+ from . import INVResolution, MarketSampleData, UPPTestRegime, UPPTestsCounts
31
25
  from . import enforcement_stats as esl
32
26
 
33
27
  __version__ = VERSION
@@ -41,7 +35,7 @@ class INVRESCntsArgs(TypedDict, total=False):
41
35
  nthreads: int
42
36
 
43
37
 
44
- def compute_upp_test_counts(
38
+ def compute_upp_test_counts( # noqa: PLR0914
45
39
  _market_data_sample: MarketSampleData,
46
40
  _upp_test_parms: gbl.HMGThresholds,
47
41
  _upp_test_regime: UPPTestRegime,
@@ -71,134 +65,6 @@ def compute_upp_test_counts(
71
65
 
72
66
  """
73
67
 
74
- upp_test_arrays = compute_upp_test_arrays(
75
- _market_data_sample, _upp_test_parms, _upp_test_regime
76
- )
77
-
78
- fcounts, hhi_delta, hhi_post = (
79
- getattr(_market_data_sample, _g) for _g in ("fcounts", "hhi_delta", "hhi_post")
80
- )
81
-
82
- stats_rowlen = 6
83
- # Clearance/enforcement counts --- by firm count
84
- enf_cnts_sim_byfirmcount_array: ArrayBIGINT = np.zeros(stats_rowlen, int)
85
- firmcounts_list = np.unique(fcounts)
86
- if firmcounts_list.any():
87
- for _fc in firmcounts_list:
88
- fc_test = fcounts == _fc
89
-
90
- enf_cnts_sim_byfirmcount_array = np.vstack((
91
- enf_cnts_sim_byfirmcount_array,
92
- np.array([
93
- _fc,
94
- np.einsum("ij->", 1 * fc_test),
95
- *[
96
- np.einsum(
97
- "ij->", 1 * (fc_test & getattr(upp_test_arrays, _a.name))
98
- )
99
- for _a in upp_test_arrays.__attrs_attrs__
100
- ],
101
- ]),
102
- ))
103
-
104
- enf_cnts_sim_byfirmcount_array = enf_cnts_sim_byfirmcount_array[1:]
105
- else:
106
- enf_cnts_sim_byfirmcount_array = np.array([], int)
107
-
108
- # Clearance/enforcement counts --- by delta
109
- enf_cnts_sim_bydelta_array: ArrayBIGINT = np.zeros(stats_rowlen, int)
110
- hhi_deltaranged = esl.hhi_delta_ranger(hhi_delta)
111
- for hhi_deltalim in esl.HHI_DELTA_KNOTS[:-1]:
112
- hhi_deltatest = hhi_deltaranged == hhi_deltalim
113
-
114
- enf_cnts_sim_bydelta_array = np.vstack((
115
- enf_cnts_sim_bydelta_array,
116
- np.array([
117
- hhi_deltalim,
118
- np.einsum("ij->", 1 * hhi_deltatest),
119
- *[
120
- np.einsum(
121
- "ij->", 1 * (hhi_deltatest & getattr(upp_test_arrays, _a.name))
122
- )
123
- for _a in upp_test_arrays.__attrs_attrs__
124
- ],
125
- ]),
126
- ))
127
-
128
- enf_cnts_sim_bydelta_array = enf_cnts_sim_bydelta_array[1:]
129
-
130
- # Clearance/enforcement counts --- by zone
131
- if np.isnan(hhi_post).all():
132
- stats_byconczone_sim = np.array([], int)
133
- else:
134
- try:
135
- hhi_zone_post_ranged = esl.hhi_zone_post_ranger(hhi_post)
136
- except ValueError as _err:
137
- print(hhi_post)
138
- raise _err
139
-
140
- stats_byconczone_sim = np.zeros(stats_rowlen + 1, int)
141
- for hhi_zone_post_knot in esl.HHI_POST_ZONE_KNOTS[:-1]:
142
- level_test = hhi_zone_post_ranged == hhi_zone_post_knot
143
-
144
- for hhi_zone_delta_knot in [0, 100, 200]:
145
- delta_test = (
146
- hhi_deltaranged > 100
147
- if hhi_zone_delta_knot == 200
148
- else hhi_deltaranged == hhi_zone_delta_knot
149
- )
150
-
151
- conc_test = level_test & delta_test
152
-
153
- stats_byconczone_sim = np.vstack((
154
- stats_byconczone_sim,
155
- np.array([
156
- hhi_zone_post_knot,
157
- hhi_zone_delta_knot,
158
- np.einsum("ij->", 1 * conc_test),
159
- *[
160
- np.einsum(
161
- "ij->",
162
- 1 * (conc_test & getattr(upp_test_arrays, _a.name)),
163
- )
164
- for _a in upp_test_arrays.__attrs_attrs__
165
- ],
166
- ]),
167
- ))
168
-
169
- enf_cnts_sim_byconczone_array = esl.enf_cnts_byconczone(stats_byconczone_sim[1:])
170
-
171
- del stats_byconczone_sim
172
- del hhi_delta, hhi_post, fcounts
173
-
174
- return UPPTestsCounts(
175
- enf_cnts_sim_byfirmcount_array,
176
- enf_cnts_sim_bydelta_array,
177
- enf_cnts_sim_byconczone_array,
178
- )
179
-
180
-
181
- def compute_upp_test_arrays(
182
- _market_data_sample: MarketSampleData,
183
- _upp_test_parms: gbl.HMGThresholds,
184
- _sim_test_regime: UPPTestRegime,
185
- /,
186
- ) -> UPPTestsRaw:
187
- """
188
- Generate UPP tests arrays for given configuration and market sample
189
-
190
- Given a standards vector, market
191
-
192
- Parameters
193
- ----------
194
- _market_data_sample
195
- market data sample
196
- _upp_test_parms
197
- guidelines thresholds for testing UPP and related statistics
198
- _sim_test_regime
199
- configuration to use for generating UPP tests
200
-
201
- """
202
68
  g_bar_, divr_bar_, cmcr_bar_, ipr_bar_ = (
203
69
  getattr(_upp_test_parms, _f) for _f in ("guppi", "divr", "cmcr", "ipr")
204
70
  )
@@ -228,32 +94,72 @@ def compute_upp_test_arrays(
228
94
  (divr_test_vector,) = _compute_test_array_seq(
229
95
  (_market_data_sample.divr_array,),
230
96
  _market_data_sample.frmshr_array,
231
- _sim_test_regime.divr_aggregator,
97
+ _upp_test_regime.divr_aggregator,
232
98
  )
233
99
 
234
100
  (guppi_test_vector, cmcr_test_vector, ipr_test_vector) = _compute_test_array_seq(
235
101
  (guppi_array, cmcr_array, ipr_array),
236
102
  _market_data_sample.frmshr_array,
237
- _sim_test_regime.guppi_aggregator,
103
+ _upp_test_regime.guppi_aggregator,
238
104
  )
239
105
  del cmcr_array, ipr_array, guppi_array
240
106
 
241
- if _sim_test_regime.resolution == INVResolution.ENFT:
242
- upp_test_arrays = UPPTestsRaw(
107
+ if _upp_test_regime.resolution == INVResolution.ENFT:
108
+ upp_test_arrays = np.hstack((
243
109
  guppi_test_vector >= g_bar_,
244
110
  (guppi_test_vector >= g_bar_) | (divr_test_vector >= divr_bar_),
245
111
  cmcr_test_vector >= cmcr_bar_,
246
112
  ipr_test_vector >= ipr_bar_,
247
- )
113
+ ))
248
114
  else:
249
- upp_test_arrays = UPPTestsRaw(
115
+ upp_test_arrays = np.hstack((
250
116
  guppi_test_vector < g_bar_,
251
117
  (guppi_test_vector < g_bar_) & (divr_test_vector < divr_bar_),
252
118
  cmcr_test_vector < cmcr_bar_,
253
119
  ipr_test_vector < ipr_bar_,
254
- )
120
+ ))
121
+
122
+ fcounts, hhi_delta, hhi_post = (
123
+ getattr(_market_data_sample, _g) for _g in ("fcounts", "hhi_delta", "hhi_post")
124
+ )
255
125
 
256
- return upp_test_arrays
126
+ # Clearance counts by firm count
127
+ enf_cnts_sim_byfirmcount_array = esl.enf_cnts_byfirmcount(
128
+ np.hstack((fcounts, np.ones_like(fcounts), upp_test_arrays))
129
+ )
130
+
131
+ # Clearance counts by Delta and Concentration Zone
132
+ hhi_zone_ranged = (
133
+ esl.hhi_zone_post_ranger(hhi_post).astype(int)
134
+ if hhi_post.any() and not np.isnan(hhi_post).all()
135
+ else np.zeros_like(hhi_post, int)
136
+ )
137
+ hhi_delta_ranged = esl.hhi_delta_ranger(hhi_delta).astype(int)
138
+
139
+ enf_cnts_sim_byhhianddelta_array = np.hstack(
140
+ (
141
+ hhi_zone_ranged,
142
+ hhi_delta_ranged,
143
+ np.ones_like(hhi_delta_ranged),
144
+ upp_test_arrays,
145
+ # *[
146
+ # 1 * getattr(upp_test_arrays, _a.name)
147
+ # for _a in upp_test_arrays.__attrs_attrs__
148
+ # ],
149
+ ),
150
+ dtype=int,
151
+ )
152
+
153
+ enf_cnts_sim_bydelta_array = esl.enf_cnts_bydelta(enf_cnts_sim_byhhianddelta_array)
154
+ enf_cnts_sim_byconczone_array = esl.enf_cnts_byconczone(
155
+ enf_cnts_sim_byhhianddelta_array
156
+ )
157
+
158
+ return UPPTestsCounts(
159
+ enf_cnts_sim_byfirmcount_array,
160
+ enf_cnts_sim_bydelta_array,
161
+ enf_cnts_sim_byconczone_array,
162
+ )
257
163
 
258
164
 
259
165
  def _compute_test_array_seq(
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.3
2
2
  Name: mergeron
3
- Version: 2025.739290.5
3
+ Version: 2025.739290.7
4
4
  Summary: Analyze merger enforcement policy using Python
5
5
  License: MIT
6
6
  Keywords: merger policy analysis,merger guidelines,merger screening,policy presumptions,concentration standards,upward pricing pressure,GUPPI
@@ -0,0 +1,22 @@
1
+ mergeron/__init__.py,sha256=36OCTzt0VcRWkgR3gwKrpsbhIijQnK9C0PNrm27Zpio,5549
2
+ mergeron/core/__init__.py,sha256=BzL_bXHyOQG8cvo76OP3K48LkeHQCJQN7ZFPRhoOdcE,2850
3
+ mergeron/core/empirical_margin_distribution.py,sha256=ktX0r5EHooXFkTed6iTzqkHw0DRv7KIDIcX3h2ukm2I,9313
4
+ mergeron/core/ftc_merger_investigations_data.py,sha256=VP0qpBbr-CARD41U7VGVckmtq5qWgreEeop2pvUTmJU,28584
5
+ mergeron/core/guidelines_boundaries.py,sha256=srCEWzSuv7cDFCf-ity-9C0NtFCdZznn5dgiUS9Ndpo,15246
6
+ mergeron/core/guidelines_boundary_functions.py,sha256=jbGTwFXoHgsIQNp2ZotAsoy0Ja0bRoJMocBwWGpsylY,29061
7
+ mergeron/core/guidelines_boundary_functions_extra.py,sha256=CwoYu6jvQFLq-9rYneDJjKu5MHG88WkICXT3e2zskss,22354
8
+ mergeron/core/pseudorandom_numbers.py,sha256=YqcVwU-Pgc0F_pKzG9Osn14RnIuYOwE-q7GVDpCUtpI,9998
9
+ mergeron/data/__init__.py,sha256=4yOOvERJ28JIT5KRkIa_t2y9aYmuFdStPM4P38BsufM,1806
10
+ mergeron/data/damodaran_margin_data.xls,sha256=Qggl1p5nkOMJI8YUXhkwXQRz-OhRSqBTzz57N0JQyYA,79360
11
+ mergeron/data/ftc_merger_investigations_data.zip,sha256=tiB2TLFyS9LMSFIv8DBA_oEEx12DU4MyjHni4NlsRMU,24002
12
+ mergeron/demo/__init__.py,sha256=KtjBlZOl7jwBCAUhrTJB9PdrN39YLYytNiSUSM_gRmA,62
13
+ mergeron/demo/visualize_empirical_margin_distribution.py,sha256=17awsa188r7uVDJuHuCWTYwlQbfaq4n8HEHF5jK-0Ic,2532
14
+ mergeron/gen/__init__.py,sha256=JvyF3WyujbDzJNKtJb_GwqGwv7lydCkubsOMDsCND3w,22579
15
+ mergeron/gen/data_generation.py,sha256=4EnNTOUEOVde-7SL2l0v8W2kDfKixnyKOChBajoMijk,17065
16
+ mergeron/gen/data_generation_functions.py,sha256=UDh3B4FPwh4SxTdJs7-faLouf7cWUUjHarRkfJc9gjI,26408
17
+ mergeron/gen/enforcement_stats.py,sha256=axojhpdjp_ovAWsL1ZX4q5tnO8FgwwRl9Hw3RU1tP-U,11084
18
+ mergeron/gen/upp_tests.py,sha256=Czub4njLESkV5LRwb1lByEI4nHBK0AwT96LYp_bG12s,6968
19
+ mergeron/py.typed,sha256=frcCV1k9oG9oKj3dpUqdJg1PxRT2RSN_XKdLCPjaYaY,2
20
+ mergeron-2025.739290.7.dist-info/METADATA,sha256=RXKEm0CLvb4GkgE7NEA6U_1fR10JIsWxOiUkbhL_Kls,14512
21
+ mergeron-2025.739290.7.dist-info/WHEEL,sha256=XbeZDeTWKc1w7CSIyre5aMDU_-PohRwTQceYnisIYYY,88
22
+ mergeron-2025.739290.7.dist-info/RECORD,,
Binary file
Binary file
@@ -1,24 +0,0 @@
1
- mergeron/__init__.py,sha256=Semlg7F-RfPXPDzYmfDX7UkULEY5p5Rq5s2obV44Ia4,4892
2
- mergeron/core/__init__.py,sha256=e62LHGyvz9zUPQbU4nos8JxydraCQrJJa-2qApY2KXQ,3081
3
- mergeron/core/empirical_margin_distribution.py,sha256=828H1UqJZK6dARS1V75fsP6FlKsGMAkBXZO-sw6WJdI,9449
4
- mergeron/core/ftc_merger_investigations_data.py,sha256=pAM5WMTw8znZn9vN_-AW5A8kH-IRwMwMFA5F_fRNaYQ,28492
5
- mergeron/core/guidelines_boundaries.py,sha256=Avtnbo2iPhhUu8JLjP7mFQo8xVZvPvNCZU9GSsVkReM,15234
6
- mergeron/core/guidelines_boundary_functions.py,sha256=Mxw9OE-pwMbTRNaJLVtx7AYfJeFJgAvFM1LrPuZrzpM,28877
7
- mergeron/core/guidelines_boundary_functions_extra.py,sha256=TnQcBVABvJulDIue3jUlVOaKmwu0EAME6z8RCf-W3Tc,16099
8
- mergeron/core/pseudorandom_numbers.py,sha256=YqcVwU-Pgc0F_pKzG9Osn14RnIuYOwE-q7GVDpCUtpI,9998
9
- mergeron/data/__init__.py,sha256=KtjBlZOl7jwBCAUhrTJB9PdrN39YLYytNiSUSM_gRmA,62
10
- mergeron/data/damodaran_margin_data.xls,sha256=Qggl1p5nkOMJI8YUXhkwXQRz-OhRSqBTzz57N0JQyYA,79360
11
- mergeron/data/damodaran_margin_data_serialized.zip,sha256=_oCVHI2YpG762WN5-KM5vnAS4OoAGkhP7Vd8KSWuiG8,20384
12
- mergeron/data/ftc_invdata.msgpack,sha256=WBFHgi7Ld4R-h2zL2Zc3TOIlKqVrbVFMH1LoI4-T-M0,264664
13
- mergeron/data/ftc_invdata.zip,sha256=1J74iTM1RVHTcvqv2v7H3yqeYN3vEvp4ZUszDk1oHQc,15686
14
- mergeron/demo/__init__.py,sha256=KtjBlZOl7jwBCAUhrTJB9PdrN39YLYytNiSUSM_gRmA,62
15
- mergeron/demo/visualize_empirical_margin_distribution.py,sha256=U40SYsSSghJrDgLHu91A2uRd1dqv5yDkkFM8NFYMGHM,2388
16
- mergeron/gen/__init__.py,sha256=S2Snd5QSgbGj7EQ2eVc9vDwy8vUYQQo38gjYuWuYOr8,22448
17
- mergeron/gen/data_generation.py,sha256=LbtNg-3DKyqqzPDMFI2_cFIof3JdaVHwFqwifIH_AWc,17644
18
- mergeron/gen/data_generation_functions.py,sha256=UDh3B4FPwh4SxTdJs7-faLouf7cWUUjHarRkfJc9gjI,26408
19
- mergeron/gen/enforcement_stats.py,sha256=CsO5pk9lKV2xsdINukWL2DjEpt4Asq0YHHJw6zAziCc,10776
20
- mergeron/gen/upp_tests.py,sha256=tsQJYOE6CahyChq_y_LKtq6P3n3exlBiYncB1GYx0Hg,9986
21
- mergeron/py.typed,sha256=frcCV1k9oG9oKj3dpUqdJg1PxRT2RSN_XKdLCPjaYaY,2
22
- mergeron-2025.739290.5.dist-info/METADATA,sha256=F4X5U0u7nlHGtBtZSKydwd0jRzFc1s5ooWFpPCJ0zVQ,14512
23
- mergeron-2025.739290.5.dist-info/WHEEL,sha256=XbeZDeTWKc1w7CSIyre5aMDU_-PohRwTQceYnisIYYY,88
24
- mergeron-2025.739290.5.dist-info/RECORD,,