huff 1.5.2__py3-none-any.whl → 1.5.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
huff/models.py CHANGED
@@ -4,8 +4,8 @@
4
4
  # Author: Thomas Wieland
5
5
  # ORCID: 0000-0001-5168-9846
6
6
  # mail: geowieland@googlemail.com
7
- # Version: 1.5.2
8
- # Last update: 2025-07-02 21:09
7
+ # Version: 1.5.3
8
+ # Last update: 2025-07-15 17:22
9
9
  # Copyright (c) 2025 Thomas Wieland
10
10
  #-----------------------------------------------------------------------
11
11
 
@@ -105,7 +105,7 @@ class CustomerOrigins:
105
105
  metadata = self.metadata
106
106
 
107
107
  if marketsize_col not in geodata_gpd_original.columns:
108
- raise KeyError ("Column " + marketsize_col + " not in data")
108
+ raise KeyError ("Error while defining market size variable: Column " + marketsize_col + " not in data")
109
109
  else:
110
110
  metadata["marketsize_col"] = marketsize_col
111
111
 
@@ -140,13 +140,13 @@ class CustomerOrigins:
140
140
  metadata = self.metadata
141
141
 
142
142
  if func not in ["power", "exponential", "logistic"]:
143
- raise ValueError("Parameter 'func' must be 'power', 'exponential' or 'logistic'")
143
+ raise ValueError("Error while defining transport costs weighting: Parameter 'func' must be 'power', 'exponential' or 'logistic'")
144
144
 
145
145
  if isinstance(param_lambda, list) and func != "logistic":
146
- raise ValueError("Function type "+ func + " requires one single parameter value")
146
+ raise ValueError("Error while defining transport costs weighting: Function type "+ func + " requires one single parameter value")
147
147
 
148
148
  if isinstance(param_lambda, (int, float)) and func == "logistic":
149
- raise ValueError("Function type "+ func + " requires two parameters in a list")
149
+ raise ValueError("Error while defining transport costs weighting: Function type "+ func + " requires two parameters in a list")
150
150
 
151
151
  metadata["weighting"][0]["name"] = "t_ij"
152
152
  metadata["weighting"][0]["func"] = func
@@ -299,7 +299,7 @@ class SupplyLocations:
299
299
  metadata = self.metadata
300
300
 
301
301
  if attraction_col not in geodata_gpd_original.columns:
302
- raise KeyError ("Column " + attraction_col + " not in data")
302
+ raise KeyError ("Error while defining attraction variable: Column " + attraction_col + " not in data")
303
303
  else:
304
304
  metadata["attraction_col"][0] = attraction_col
305
305
 
@@ -316,7 +316,7 @@ class SupplyLocations:
316
316
  metadata = self.metadata
317
317
 
318
318
  if metadata["attraction_col"] is None:
319
- raise ValueError ("Attraction column is not yet defined. Use SupplyLocations.define_attraction()")
319
+ raise ValueError ("Error while defining attraction weighting: Attraction column is not yet defined. Use SupplyLocations.define_attraction()")
320
320
 
321
321
  metadata["weighting"][0]["name"] = "A_j"
322
322
  metadata["weighting"][0]["func"] = func
@@ -336,7 +336,7 @@ class SupplyLocations:
336
336
  metadata = self.metadata
337
337
 
338
338
  if metadata["attraction_col"] is None:
339
- raise ValueError ("Attraction column is not yet defined. Use SupplyLocations.define_attraction()")
339
+ raise ValueError ("Error while adding utility variable: Attraction column is not yet defined. Use SupplyLocations.define_attraction()")
340
340
 
341
341
  no_attraction_vars = len(metadata["attraction_col"])
342
342
  new_key = no_attraction_vars
@@ -371,9 +371,9 @@ class SupplyLocations:
371
371
  new_destinations_metadata = new_destinations.get_metadata()
372
372
 
373
373
  if list(new_destinations_gpd_original.columns) != list(geodata_gpd_original.columns):
374
- raise KeyError("Supply locations and new destinations data have different column names.")
374
+ raise KeyError("Error while adding new destinations: Supply locations and new destinations data have different column names.")
375
375
  if list(new_destinations_gpd.columns) != list(geodata_gpd.columns):
376
- raise KeyError("Supply locations and new destinations data have different column names.")
376
+ raise KeyError("Error while adding new destinations: Supply locations and new destinations data have different column names.")
377
377
 
378
378
  geodata_gpd_original = pd.concat(
379
379
  [
@@ -644,7 +644,7 @@ class InteractionMatrix:
644
644
  )
645
645
 
646
646
  if time_distance_matrix.get_metadata() is None:
647
- raise ValueError ("No transport costs matrix was built.")
647
+ raise ValueError ("Error in transport costs calculation: No transport costs matrix was built.")
648
648
 
649
649
  transport_costs_matrix = time_distance_matrix.get_matrix()
650
650
  transport_costs_matrix_config = time_distance_matrix.get_config()
@@ -773,13 +773,13 @@ class InteractionMatrix:
773
773
  interaction_matrix_metadata = self.get_metadata()
774
774
 
775
775
  if "t_ij" not in interaction_matrix_df.columns:
776
- raise ValueError ("No transport cost variable in interaction matrix")
776
+ raise ValueError ("Error in utility calculation: No transport cost variable in interaction matrix")
777
777
  if "A_j" not in interaction_matrix_df.columns:
778
- raise ValueError ("No attraction variable in interaction matrix")
778
+ raise ValueError ("Error in utility calculation: No attraction variable in interaction matrix")
779
779
  if interaction_matrix_df["t_ij"].isna().all():
780
- raise ValueError ("Transport cost variable is not defined")
780
+ raise ValueError ("Error in utility calculation: Transport cost variable is not defined")
781
781
  if interaction_matrix_df["A_j"].isna().all():
782
- raise ValueError ("Attraction variable is not defined")
782
+ raise ValueError ("Error in utility calculation: Attraction variable is not defined")
783
783
 
784
784
  check_vars(
785
785
  df = interaction_matrix_df,
@@ -797,7 +797,7 @@ class InteractionMatrix:
797
797
  elif tc_weighting["func"] == "logistic":
798
798
  interaction_matrix_df["t_ij_weighted"] = 1+np.exp(tc_weighting["param"][0] + tc_weighting["param"][1] * interaction_matrix_df['t_ij'])
799
799
  else:
800
- raise ValueError ("Transport costs weighting is not defined.")
800
+ raise ValueError ("Error in utility calculation: Transport costs weighting is not defined.")
801
801
 
802
802
  supply_locations = self.supply_locations
803
803
  supply_locations_metadata = supply_locations.get_metadata()
@@ -808,7 +808,7 @@ class InteractionMatrix:
808
808
  elif tc_weighting["func"] == "exponential":
809
809
  interaction_matrix_df["A_j_weighted"] = np.exp(attraction_weighting["param"] * interaction_matrix_df["A_j"])
810
810
  else:
811
- raise ValueError ("Attraction weighting is not defined.")
811
+ raise ValueError ("Error in utility calculation: Attraction weighting is not defined.")
812
812
 
813
813
  attrac_vars = supply_locations_metadata["attraction_col"]
814
814
  attrac_vars_no = len(attrac_vars)
@@ -831,7 +831,7 @@ class InteractionMatrix:
831
831
  elif func == "exponential":
832
832
  interaction_matrix_df[name+"_weighted"] = np.exp(param * interaction_matrix_df[name])
833
833
  else:
834
- raise ValueError ("Weighting for " + name + " is not defined.")
834
+ raise ValueError ("Error in utility calculation: Weighting for " + name + " is not defined.")
835
835
 
836
836
  interaction_matrix_df["A_j_weighted"] = interaction_matrix_df["A_j_weighted"]*interaction_matrix_df[name+"_weighted"]
837
837
 
@@ -881,9 +881,9 @@ class InteractionMatrix:
881
881
  interaction_matrix_df = self.interaction_matrix_df
882
882
 
883
883
  if "C_i" not in interaction_matrix_df.columns:
884
- raise ValueError ("No market size variable in interaction matrix")
884
+ raise ValueError ("Error in flows calculation: No market size variable in interaction matrix")
885
885
  if interaction_matrix_df["C_i"].isna().all():
886
- raise ValueError ("Market size column in customer origins not defined. Use CustomerOrigins.define_marketsize()")
886
+ raise ValueError ("Error in flows calculation: Market size column in customer origins not defined. Use CustomerOrigins.define_marketsize()")
887
887
 
888
888
  check_vars(
889
889
  df = interaction_matrix_df,
@@ -940,7 +940,7 @@ class InteractionMatrix:
940
940
  else:
941
941
 
942
942
  if "C_i" not in interaction_matrix_df.columns or interaction_matrix_df["C_i"].isna().all():
943
- raise ValueError("Customer origins market size is not available")
943
+ raise ValueError("Error in hansen accessibility calculation: Customer origins market size is not available")
944
944
 
945
945
  customer_origins_metadata = self.customer_origins.get_metadata()
946
946
  tc_weighting = customer_origins_metadata["weighting"][0]
@@ -951,7 +951,7 @@ class InteractionMatrix:
951
951
  elif tc_weighting["func"] == "logistic":
952
952
  interaction_matrix_df["t_ij_weighted"] = 1+np.exp(tc_weighting["param"][0] + tc_weighting["param"][1] * interaction_matrix_df['t_ij'])
953
953
  else:
954
- raise ValueError ("Transport costs weighting is not defined.")
954
+ raise ValueError ("Error in hansen accessibility calculation: Transport costs weighting is not defined.")
955
955
 
956
956
  interaction_matrix_df["U_ji"] = interaction_matrix_df["C_i"]*interaction_matrix_df["t_ij_weighted"]
957
957
  hansen_df = pd.DataFrame(interaction_matrix_df.groupby("j")["U_ji"].sum()).reset_index()
@@ -1077,16 +1077,16 @@ class InteractionMatrix:
1077
1077
  ):
1078
1078
 
1079
1079
  if fit_by not in ["probabilities", "flows"]:
1080
- raise ValueError ("Parameter 'fit_by' must be 'probabilities' or 'flows'")
1080
+ raise ValueError ("Error in loglik: Parameter 'fit_by' must be 'probabilities' or 'flows'")
1081
1081
 
1082
1082
  if not isinstance(params, list):
1083
1083
  if isinstance(params, np.ndarray):
1084
1084
  params = params.tolist()
1085
1085
  else:
1086
- raise ValueError("Parameter 'params' must be a list or np.ndarray with at least 2 parameter values")
1086
+ raise ValueError("Error in loglik: Parameter 'params' must be a list or np.ndarray with at least 2 parameter values")
1087
1087
 
1088
1088
  if len(params) < 2:
1089
- raise ValueError("Parameter 'params' must be a list or np.ndarray with at least 2 parameter values")
1089
+ raise ValueError("Error in loglik: Parameter 'params' must be a list or np.ndarray with at least 2 parameter values")
1090
1090
 
1091
1091
  customer_origins = self.customer_origins
1092
1092
  customer_origins_metadata = customer_origins.get_metadata()
@@ -1096,7 +1096,7 @@ class InteractionMatrix:
1096
1096
  if customer_origins_metadata["weighting"][0]["func"] == "logistic":
1097
1097
 
1098
1098
  if len(params) < 3:
1099
- raise ValueError("When using logistic weighting, parameter 'params' must be a list or np.ndarray with at least 3 parameter values")
1099
+ raise ValueError("Error in loglik: When using logistic weighting, parameter 'params' must be a list or np.ndarray with at least 3 parameter values")
1100
1100
 
1101
1101
  param_gamma, param_lambda, param_lambda2 = params[0], params[1], params[2]
1102
1102
 
@@ -1116,7 +1116,7 @@ class InteractionMatrix:
1116
1116
 
1117
1117
  else:
1118
1118
 
1119
- raise ValueError ("Huff Model with transport cost weighting of type " + customer_origins_metadata["weighting"][0]["func"] + " must have >= 2 input parameters")
1119
+ raise ValueError ("Error in loglik: Huff Model with transport cost weighting of type " + customer_origins_metadata["weighting"][0]["func"] + " must have >= 2 input parameters")
1120
1120
 
1121
1121
  elif customer_origins_metadata["weighting"][0]["func"] == "logistic":
1122
1122
 
@@ -1126,7 +1126,7 @@ class InteractionMatrix:
1126
1126
 
1127
1127
  else:
1128
1128
 
1129
- raise ValueError("Huff Model with transport cost weightig of type " + customer_origins_metadata["weighting"][0]["func"] + " must have >= 3 input parameters")
1129
+ raise ValueError("Error in loglik: Huff Model with transport cost weightig of type " + customer_origins_metadata["weighting"][0]["func"] + " must have >= 3 input parameters")
1130
1130
 
1131
1131
  if (customer_origins_metadata["weighting"][0]["func"] in ["power", "exponential"] and len(params) > 2):
1132
1132
 
@@ -1224,10 +1224,10 @@ class InteractionMatrix:
1224
1224
  params_metadata = params_metadata_customer_origins+params_metadata_supply_locations
1225
1225
 
1226
1226
  if len(initial_params) < 2 or len(initial_params) != params_metadata:
1227
- raise ValueError("Parameter 'initial_params' must be a list with " + str(params_metadata) + " entries (Attaction: " + str(params_metadata_supply_locations) + ", Transport costs: " + str(params_metadata_customer_origins) + ")")
1227
+ raise ValueError("Error in huff_ml_fit: Parameter 'initial_params' must be a list with " + str(params_metadata) + " entries (Attaction: " + str(params_metadata_supply_locations) + ", Transport costs: " + str(params_metadata_customer_origins) + ")")
1228
1228
 
1229
1229
  if len(bounds) != len(initial_params):
1230
- raise ValueError("Parameter 'bounds' must have the same length as parameter 'initial_params' (" + str(len(bounds)) + ", " + str(len(initial_params)) + ")")
1230
+ raise ValueError("Error in huff_ml_fit: Parameter 'bounds' must have the same length as parameter 'initial_params' (" + str(len(bounds)) + ", " + str(len(initial_params)) + ")")
1231
1231
 
1232
1232
  ml_result = minimize(
1233
1233
  self.loglik,
@@ -1344,7 +1344,7 @@ class InteractionMatrix:
1344
1344
  "update_estimates": update_estimates
1345
1345
  }
1346
1346
 
1347
- return self
1347
+ return self
1348
1348
 
1349
1349
  def update(self):
1350
1350
 
@@ -1360,12 +1360,12 @@ class InteractionMatrix:
1360
1360
  supply_locations_geodata_gpd_new = supply_locations_geodata_gpd[supply_locations_geodata_gpd["j_update"] == 1]
1361
1361
 
1362
1362
  if len(supply_locations_geodata_gpd_new) < 1:
1363
- raise ValueError("There are no new destinations for an interaction matrix update. Use SupplyLocations.add_new_destinations()")
1363
+ raise ValueError("Error in InteractionMatrix update: There are no new destinations for an interaction matrix update. Use SupplyLocations.add_new_destinations()")
1364
1364
 
1365
1365
  supply_locations_geodata_gpd_original = supply_locations.get_geodata_gpd_original().copy()
1366
1366
  supply_locations_geodata_gpd_original_new = supply_locations_geodata_gpd_original[supply_locations_geodata_gpd_original["j_update"] == 1]
1367
1367
  if len(supply_locations_geodata_gpd_original_new) < 1:
1368
- raise ValueError("There are no new destinations for an interaction matrix update. Use SupplyLocations.add_new_destinations()")
1368
+ raise ValueError("Error in InteractionMatrix update: There are no new destinations for an interaction matrix update. Use SupplyLocations.add_new_destinations()")
1369
1369
 
1370
1370
  supply_locations_new = SupplyLocations(
1371
1371
  geodata_gpd=supply_locations_geodata_gpd_new,
@@ -1458,7 +1458,7 @@ class MarketAreas:
1458
1458
  ):
1459
1459
 
1460
1460
  if not isinstance(model_object, (HuffModel, MCIModel, InteractionMatrix)):
1461
- raise ValueError("Parameter 'interaction_matrix' must be of class HuffModel, MCIModel, or InteractionMatrix")
1461
+ raise ValueError("Error while adding MarketAreas to model: Parameter 'interaction_matrix' must be of class HuffModel, MCIModel, or InteractionMatrix")
1462
1462
 
1463
1463
  if isinstance(model_object, MCIModel):
1464
1464
 
@@ -1479,7 +1479,7 @@ class MarketAreas:
1479
1479
  elif isinstance(model_object, InteractionMatrix):
1480
1480
 
1481
1481
  if output_model not in ["Huff", "MCI"]:
1482
- raise ValueError("Parameter 'output_model' must be either 'Huff' or 'MCI'")
1482
+ raise ValueError("Error while adding MarketAreas to model: Parameter 'output_model' must be either 'Huff' or 'MCI'")
1483
1483
 
1484
1484
  if output_model == "Huff":
1485
1485
 
@@ -1597,10 +1597,19 @@ class HuffModel:
1597
1597
  else:
1598
1598
 
1599
1599
  name = supply_locations_metadata["weighting"][key]["name"]
1600
- param = supply_locations_metadata["weighting"][key]["param"]
1601
1600
  func = supply_locations_metadata["weighting"][key]["func"]
1602
1601
 
1603
- print(f"{name[:16]:16} {round(param, 3)} ({func})")
1602
+ if "param" in supply_locations_metadata["weighting"][key]:
1603
+
1604
+ param = supply_locations_metadata["weighting"][key]["param"]
1605
+
1606
+ if param is not None:
1607
+
1608
+ print(f"{name[:16]:16} {round(param, 3)} ({func})")
1609
+
1610
+ else:
1611
+
1612
+ print(f"{attrac_vars[key][:16]:16} NA ({func})")
1604
1613
 
1605
1614
  print("----------------------------------")
1606
1615
 
@@ -1743,10 +1752,10 @@ class HuffModel:
1743
1752
  if isinstance(params, np.ndarray):
1744
1753
  params = params.tolist()
1745
1754
  else:
1746
- raise ValueError("Parameter 'params' must be a list or np.ndarray with at least 2 parameter values")
1755
+ raise ValueError("Error in loglik: Parameter 'params' must be a list or np.ndarray with at least 2 parameter values")
1747
1756
 
1748
1757
  if len(params) < 2:
1749
- raise ValueError("Parameter 'params' must be a list or np.ndarray with at least 2 parameter values")
1758
+ raise ValueError("Error in loglik: Parameter 'params' must be a list or np.ndarray with at least 2 parameter values")
1750
1759
 
1751
1760
  market_areas_df = self.market_areas_df
1752
1761
 
@@ -1758,7 +1767,7 @@ class HuffModel:
1758
1767
  if customer_origins_metadata["weighting"][0]["func"] == "logistic":
1759
1768
 
1760
1769
  if len(params) < 3:
1761
- raise ValueError("When using logistic weighting, parameter 'params' must be a list or np.ndarray with at least 3 parameter values")
1770
+ raise ValueError("Error in loglik: When using logistic weighting, parameter 'params' must be a list or np.ndarray with at least 3 parameter values")
1762
1771
 
1763
1772
  param_gamma, param_lambda, param_lambda2 = params[0], params[1], params[2]
1764
1773
 
@@ -1776,7 +1785,7 @@ class HuffModel:
1776
1785
 
1777
1786
  else:
1778
1787
 
1779
- raise ValueError ("Huff Model with transport cost weighting of type " + customer_origins_metadata["weighting"][0]["func"] + " must have >= 2 input parameters")
1788
+ raise ValueError ("Error in loglik: Huff Model with transport cost weighting of type " + customer_origins_metadata["weighting"][0]["func"] + " must have >= 2 input parameters")
1780
1789
 
1781
1790
  elif customer_origins_metadata["weighting"][0]["func"] == "logistic":
1782
1791
 
@@ -1786,7 +1795,7 @@ class HuffModel:
1786
1795
 
1787
1796
  else:
1788
1797
 
1789
- raise ValueError("Huff Model with transport cost weightig of type " + customer_origins_metadata["weighting"][0]["func"] + " must have >= 3 input parameters")
1798
+ raise ValueError("Error in loglik: Huff Model with transport cost weightig of type " + customer_origins_metadata["weighting"][0]["func"] + " must have >= 3 input parameters")
1790
1799
 
1791
1800
  if (customer_origins_metadata["weighting"][0]["func"] in ["power", "exponential"] and len(params) > 2):
1792
1801
 
@@ -1897,10 +1906,10 @@ class HuffModel:
1897
1906
  params_metadata = params_metadata_customer_origins+params_metadata_supply_locations
1898
1907
 
1899
1908
  if len(initial_params) < 2 or len(initial_params) != params_metadata:
1900
- raise ValueError("Parameter 'initial_params' must be a list with " + str(params_metadata) + " entries (Attaction: " + str(params_metadata_supply_locations) + ", Transport costs: " + str(params_metadata_customer_origins) + ")")
1909
+ raise ValueError("Error in ml_fit: Parameter 'initial_params' must be a list with " + str(params_metadata) + " entries (Attaction: " + str(params_metadata_supply_locations) + ", Transport costs: " + str(params_metadata_customer_origins) + ")")
1901
1910
 
1902
1911
  if len(bounds) != len(initial_params):
1903
- raise ValueError("Parameter 'bounds' must have the same length as parameter 'initial_params' (" + str(len(bounds)) + ", " + str(len(initial_params)) + ")")
1912
+ raise ValueError("Error in ml_fit: Parameter 'bounds' must have the same length as parameter 'initial_params' (" + str(len(bounds)) + ", " + str(len(initial_params)) + ")")
1904
1913
 
1905
1914
  ml_result = minimize(
1906
1915
  self.loglik,
@@ -2011,9 +2020,116 @@ class HuffModel:
2011
2020
 
2012
2021
  else:
2013
2022
 
2014
- raise ValueError("Parameter 'fit_by' must be 'probabilities', 'flows' or 'totals'")
2023
+ raise ValueError("Error in ml_fit: Parameter 'fit_by' must be 'probabilities', 'flows' or 'totals'")
2015
2024
 
2016
2025
  return self
2026
+
2027
+ def confint(
2028
+ self,
2029
+ alpha = 0.05,
2030
+ repeats = 3,
2031
+ sample_size = 0.75,
2032
+ replace = True
2033
+ ):
2034
+
2035
+ if self.interaction_matrix.metadata["fit"] is None or self.interaction_matrix.metadata["fit"] == {}:
2036
+ raise ValueError("Error while estimating confidence intervals: Model object does not contain information towards fit procedure")
2037
+
2038
+ keys_necessary = [
2039
+ "function",
2040
+ "fit_by",
2041
+ "initial_params",
2042
+ "method",
2043
+ "bounds",
2044
+ "constraints"
2045
+ ]
2046
+
2047
+ for key_necessary in keys_necessary:
2048
+ if key_necessary not in self.interaction_matrix.metadata["fit"]:
2049
+ raise KeyError(f"Error while estimating confidence intervals: Model object does not contain full information towards fit procedure. Missing key {key_necessary}")
2050
+
2051
+ fitted_params_repeats = []
2052
+
2053
+ alpha_lower = alpha/2
2054
+ alpha_upper = 1-alpha/2
2055
+
2056
+ huff_model_copy = copy.deepcopy(self)
2057
+
2058
+ if self.interaction_matrix.metadata["fit"]["fit_by"] in ["probabilities", "flows"]:
2059
+
2060
+ for i in range(repeats):
2061
+
2062
+ try:
2063
+
2064
+ n_samples = int(len(huff_model_copy.interaction_matrix.interaction_matrix_df)*sample_size)
2065
+
2066
+ huff_model_copy.interaction_matrix.interaction_matrix_df = huff_model_copy.interaction_matrix.interaction_matrix_df.sample(
2067
+ n = n_samples,
2068
+ replace = replace
2069
+ )
2070
+
2071
+ huff_model_copy.ml_fit(
2072
+ initial_params = self.interaction_matrix.metadata["fit"]["initial_params"],
2073
+ method = self.interaction_matrix.metadata["fit"]["method"],
2074
+ bounds = self.interaction_matrix.metadata["fit"]["bounds"],
2075
+ constraints = self.interaction_matrix.metadata["fit"]["constraints"],
2076
+ fit_by = self.interaction_matrix.metadata["fit"]["fit_by"],
2077
+ update_estimates = True,
2078
+ check_numbers = True
2079
+ )
2080
+
2081
+ minimize_fittedparams = huff_model_copy.interaction_matrix.metadata["fit"]["minimize_fittedparams"]
2082
+
2083
+ fitted_params_repeats.append(minimize_fittedparams)
2084
+
2085
+ except Exception as err:
2086
+
2087
+ print (f"Error in repeat {str(i)}: {err}")
2088
+
2089
+ elif self.metadata["fit"]["fit_by"] == "totals":
2090
+
2091
+ for i in range(repeats):
2092
+
2093
+ n_samples = int(len(huff_model_copy.market_areas_df)*sample_size)
2094
+
2095
+ huff_model_copy.market_areas_df = huff_model_copy.market_areas_df.sample(
2096
+ n = n_samples,
2097
+ replace = replace
2098
+ )
2099
+
2100
+ huff_model_copy.interaction_matrix.interaction_matrix_df = huff_model_copy.interaction_matrix.interaction_matrix_df[
2101
+ huff_model_copy.interaction_matrix.interaction_matrix_df["j"].isin(huff_model_copy.market_areas_df["j"])
2102
+ ]
2103
+
2104
+ huff_model_copy.ml_fit(
2105
+ initial_params = self.interaction_matrix.metadata["fit"]["initial_params"],
2106
+ method = self.interaction_matrix.metadata["fit"]["method"],
2107
+ bounds = self.interaction_matrix.metadata["fit"]["bounds"],
2108
+ constraints = self.interaction_matrix.metadata["fit"]["constraints"],
2109
+ fit_by = self.interaction_matrix.metadata["fit"]["fit_by"],
2110
+ update_estimates = True,
2111
+ check_numbers = True
2112
+ )
2113
+
2114
+ minimize_fittedparams = huff_model_copy.interaction_matrix.metadata["fit"]["minimize_fittedparams"]
2115
+
2116
+ fitted_params_repeats.append(minimize_fittedparams)
2117
+
2118
+ else:
2119
+
2120
+ raise ValueError("Error while estimating confidence intervals: Parameter 'fit_by' must be 'probabilities', 'flows' or 'totals'")
2121
+
2122
+ fitted_params_repeats_array = np.array(fitted_params_repeats)
2123
+ fitted_params_repeats_array_transposed = fitted_params_repeats_array.T
2124
+
2125
+ param_ci = pd.DataFrame(columns=["lower", "upper"])
2126
+
2127
+ for i, col in enumerate(fitted_params_repeats_array_transposed):
2128
+
2129
+ param_ci.loc[i, "lower"] = np.quantile(col, alpha_lower)
2130
+ param_ci.loc[i, "upper"] = np.quantile(col, alpha_upper)
2131
+
2132
+ return param_ci
2017
2133
 
2018
2134
  def update(self):
2019
2135
 
@@ -2110,7 +2226,7 @@ class HuffModel:
2110
2226
 
2111
2227
  else:
2112
2228
 
2113
- raise ValueError("Parameter 'by' must be 'probabilities', 'flows', or 'totals'")
2229
+ raise ValueError("Error in HuffModel.modelfit: Parameter 'by' must be 'probabilities', 'flows', or 'totals'")
2114
2230
 
2115
2231
  class MCIModel:
2116
2232
 
@@ -2264,9 +2380,9 @@ class MCIModel:
2264
2380
  interaction_matrix_metadata = interaction_matrix.get_metadata()
2265
2381
 
2266
2382
  if interaction_matrix_df["t_ij"].isna().all():
2267
- raise ValueError ("Transport cost variable is not defined")
2383
+ raise ValueError ("Error in utility calculation: Transport cost variable is not defined")
2268
2384
  if interaction_matrix_df["A_j"].isna().all():
2269
- raise ValueError ("Attraction variable is not defined")
2385
+ raise ValueError ("Error in utility calculation: Attraction variable is not defined")
2270
2386
 
2271
2387
  check_vars(
2272
2388
  df = interaction_matrix_df,
@@ -2367,10 +2483,10 @@ class MCIModel:
2367
2483
  interaction_matrix_df = interaction_matrix.get_interaction_matrix_df()
2368
2484
 
2369
2485
  if "C_i" not in interaction_matrix_df.columns:
2370
- raise ValueError ("No market size column defined in interaction matrix.")
2486
+ raise ValueError ("Error in flows calculation: No market size column defined in interaction matrix.")
2371
2487
 
2372
2488
  if interaction_matrix_df["C_i"].isna().all():
2373
- raise ValueError ("Market size column in customer origins not defined. Use CustomerOrigins.define_marketsize()")
2489
+ raise ValueError ("Error in flows calculation: Market size column in customer origins not defined. Use CustomerOrigins.define_marketsize()")
2374
2490
 
2375
2491
  check_vars(
2376
2492
  df = interaction_matrix_df,
@@ -2425,12 +2541,12 @@ def load_geodata (
2425
2541
  ):
2426
2542
 
2427
2543
  if location_type is None or (location_type != "origins" and location_type != "destinations"):
2428
- raise ValueError ("Argument location_type must be either 'origins' or 'destinations'")
2544
+ raise ValueError ("Error while loading geodata: Argument location_type must be either 'origins' or 'destinations'")
2429
2545
 
2430
2546
  if isinstance(data, gp.GeoDataFrame):
2431
2547
  geodata_gpd_original = data
2432
2548
  if not all(geodata_gpd_original.geometry.geom_type == "Point"):
2433
- raise ValueError ("Input geopandas.GeoDataFrame must be of type 'Point'")
2549
+ raise ValueError ("Error while loading geodata: Input geopandas.GeoDataFrame must be of type 'Point'")
2434
2550
  crs_input = geodata_gpd_original.crs
2435
2551
  elif isinstance(data, pd.DataFrame):
2436
2552
  geodata_tab = data
@@ -2438,13 +2554,13 @@ def load_geodata (
2438
2554
  if data_type == "shp":
2439
2555
  geodata_gpd_original = gp.read_file(data)
2440
2556
  if not all(geodata_gpd_original.geometry.geom_type == "Point"):
2441
- raise ValueError ("Input shapefile must be of type 'Point'")
2557
+ raise ValueError ("Error while loading geodata: Input shapefile must be of type 'Point'")
2442
2558
  crs_input = geodata_gpd_original.crs
2443
2559
  elif data_type == "csv" or data_type == "xlsx":
2444
2560
  if x_col is None:
2445
- raise ValueError ("Missing value for X coordinate column")
2561
+ raise ValueError ("Error while loading geodata: Missing value for X coordinate column")
2446
2562
  if y_col is None:
2447
- raise ValueError ("Missing value for Y coordinate column")
2563
+ raise ValueError ("Error while loading geodata: Missing value for Y coordinate column")
2448
2564
  elif data_type == "csv":
2449
2565
  geodata_tab = pd.read_csv(
2450
2566
  data,
@@ -2455,9 +2571,9 @@ def load_geodata (
2455
2571
  elif data_type == "xlsx":
2456
2572
  geodata_tab = pd.read_excel(data)
2457
2573
  else:
2458
- raise TypeError("Unknown type of data")
2574
+ raise TypeError("Error while loading geodata: Unknown type of data")
2459
2575
  else:
2460
- raise TypeError("data must be pandas.DataFrame, geopandas.GeoDataFrame or file (.csv, .xlsx, .shp)")
2576
+ raise TypeError("Error while loading geodata: Param 'data' must be pandas.DataFrame, geopandas.GeoDataFrame or file (.csv, .xlsx, .shp)")
2461
2577
 
2462
2578
  if data_type == "csv" or data_type == "xlsx" or (isinstance(data, pd.DataFrame) and not isinstance(data, gp.GeoDataFrame)):
2463
2579
 
@@ -2528,17 +2644,17 @@ def create_interaction_matrix(
2528
2644
  ):
2529
2645
 
2530
2646
  if not isinstance(customer_origins, CustomerOrigins):
2531
- raise ValueError ("customer_origins must be of class CustomerOrigins")
2647
+ raise ValueError ("Error while creating interaction matrix: customer_origins must be of class CustomerOrigins")
2532
2648
  if not isinstance(supply_locations, SupplyLocations):
2533
- raise ValueError ("supply_locations must be of class SupplyLocations")
2649
+ raise ValueError ("Error while creating interaction matrix: supply_locations must be of class SupplyLocations")
2534
2650
 
2535
2651
  customer_origins_metadata = customer_origins.get_metadata()
2536
2652
  if customer_origins_metadata["marketsize_col"] is None:
2537
- raise ValueError("Market size column in customer origins not defined. Use CustomerOrigins.define_marketsize()")
2653
+ raise ValueError("Error while creating interaction matrix: Market size column in customer origins not defined. Use CustomerOrigins.define_marketsize()")
2538
2654
 
2539
2655
  supply_locations_metadata = supply_locations.get_metadata()
2540
2656
  if supply_locations_metadata["attraction_col"][0] is None:
2541
- raise ValueError("Attraction column in supply locations not defined. Use SupplyLocations.define_attraction()")
2657
+ raise ValueError("Error while creating interaction matrix: Attraction column in supply locations not defined. Use SupplyLocations.define_attraction()")
2542
2658
 
2543
2659
  customer_origins_unique_id = customer_origins_metadata["unique_id"]
2544
2660
  customer_origins_marketsize = customer_origins_metadata["marketsize_col"]
@@ -2626,7 +2742,7 @@ def load_interaction_matrix(
2626
2742
  interaction_matrix_df = data
2627
2743
  elif isinstance(data, str):
2628
2744
  if data_type not in ["csv", "xlsx"]:
2629
- raise ValueError ("data_type must be 'csv' or 'xlsx'")
2745
+ raise ValueError ("Error while loading interaction matrix: param 'data_type' must be 'csv' or 'xlsx'")
2630
2746
  if data_type == "csv":
2631
2747
  interaction_matrix_df = pd.read_csv(
2632
2748
  data,
@@ -2643,14 +2759,14 @@ def load_interaction_matrix(
2643
2759
  else:
2644
2760
  interaction_matrix_df = pd.read_excel(data)
2645
2761
  else:
2646
- raise TypeError("Unknown type of data")
2762
+ raise TypeError("Error while loading interaction matrix: Unknown type of data")
2647
2763
  else:
2648
- raise TypeError("data must be pandas.DataFrame or file (.csv, .xlsx)")
2764
+ raise TypeError("Error while loading interaction matrix: param 'data' must be pandas.DataFrame or file (.csv, .xlsx)")
2649
2765
 
2650
2766
  if customer_origins_col not in interaction_matrix_df.columns:
2651
- raise KeyError ("Column " + customer_origins_col + " not in data")
2767
+ raise KeyError ("Error while loading interaction matrix: Column " + customer_origins_col + " not in data")
2652
2768
  if supply_locations_col not in interaction_matrix_df.columns:
2653
- raise KeyError ("Column " + supply_locations_col + " not in data")
2769
+ raise KeyError ("Error while loading interaction matrix: Column " + supply_locations_col + " not in data")
2654
2770
 
2655
2771
  cols_check = attraction_col + [transport_costs_col]
2656
2772
  if flows_col is not None:
@@ -2671,7 +2787,7 @@ def load_interaction_matrix(
2671
2787
  if isinstance(customer_origins_coords_col, str):
2672
2788
 
2673
2789
  if customer_origins_coords_col not in interaction_matrix_df.columns:
2674
- raise KeyError ("Column " + customer_origins_coords_col + " not in data.")
2790
+ raise KeyError ("Error while loading interaction matrix: Column " + customer_origins_coords_col + " not in data.")
2675
2791
 
2676
2792
  customer_origins_geodata_tab = interaction_matrix_df[[customer_origins_col, customer_origins_coords_col]]
2677
2793
  customer_origins_geodata_tab = customer_origins_geodata_tab.drop_duplicates()
@@ -2687,7 +2803,7 @@ def load_interaction_matrix(
2687
2803
  elif isinstance(customer_origins_coords_col, list):
2688
2804
 
2689
2805
  if len(customer_origins_coords_col) != 2:
2690
- raise ValueError ("Column " + customer_origins_coords_col + " must be a geometry column OR TWO columns with X and Y")
2806
+ raise ValueError ("Error while loading interaction matrix: Column " + str(customer_origins_coords_col) + " must be a geometry column OR TWO columns with X and Y")
2691
2807
 
2692
2808
  check_vars (
2693
2809
  df = interaction_matrix_df,
@@ -2742,7 +2858,7 @@ def load_interaction_matrix(
2742
2858
  if isinstance(supply_locations_coords_col, str):
2743
2859
 
2744
2860
  if supply_locations_coords_col not in interaction_matrix_df.columns:
2745
- raise KeyError ("Column " + supply_locations_coords_col + " not in data.")
2861
+ raise KeyError ("Error while loading interaction matrix: Column " + supply_locations_coords_col + " not in data.")
2746
2862
 
2747
2863
  supply_locations_geodata_tab = interaction_matrix_df[[supply_locations_col, supply_locations_coords_col]]
2748
2864
  supply_locations_geodata_tab = supply_locations_geodata_tab.drop_duplicates()
@@ -2758,7 +2874,7 @@ def load_interaction_matrix(
2758
2874
  if isinstance(supply_locations_coords_col, list):
2759
2875
 
2760
2876
  if len(supply_locations_coords_col) != 2:
2761
- raise ValueError ("Column " + supply_locations_coords_col + " must be a geometry column OR TWO columns with X and Y")
2877
+ raise ValueError ("Error while loading interaction matrix: Column " + str(supply_locations_coords_col) + " must be a geometry column OR TWO columns with X and Y")
2762
2878
 
2763
2879
  check_vars (
2764
2880
  df = interaction_matrix_df,
@@ -2867,7 +2983,7 @@ def load_marketareas(
2867
2983
  market_areas_df = data
2868
2984
  elif isinstance(data, str):
2869
2985
  if data_type not in ["csv", "xlsx"]:
2870
- raise ValueError ("data_type must be 'csv' or 'xlsx'")
2986
+ raise ValueError ("Error while loading market areas: data_type must be 'csv' or 'xlsx'")
2871
2987
  if data_type == "csv":
2872
2988
  market_areas_df = pd.read_csv(
2873
2989
  data,
@@ -2884,14 +3000,14 @@ def load_marketareas(
2884
3000
  else:
2885
3001
  market_areas_df = pd.read_excel(data)
2886
3002
  else:
2887
- raise TypeError("Unknown type of data")
3003
+ raise TypeError("Error while loading market areas: Unknown type of data")
2888
3004
  else:
2889
- raise TypeError("data must be pandas.DataFrame or file (.csv, .xlsx)")
3005
+ raise TypeError("Error while loading market areas: data must be pandas.DataFrame or file (.csv, .xlsx)")
2890
3006
 
2891
3007
  if supply_locations_col not in market_areas_df.columns:
2892
- raise KeyError ("Column " + supply_locations_col + " not in data")
3008
+ raise KeyError ("Error while loading market areas: Column " + supply_locations_col + " not in data")
2893
3009
  if total_col not in market_areas_df.columns:
2894
- raise KeyError ("Column " + supply_locations_col + " not in data")
3010
+ raise KeyError ("Error while loading market areas: Column " + total_col + " not in data")
2895
3011
 
2896
3012
  if check_df_vars:
2897
3013
  check_vars(
@@ -2934,7 +3050,7 @@ def market_shares(
2934
3050
  if ref_col is not None:
2935
3051
 
2936
3052
  if ref_col not in df.columns:
2937
- raise KeyError(f"Column '{ref_col}' not in dataframe.")
3053
+ raise KeyError(f"Error while calculating market shares: Column '{ref_col}' not in dataframe.")
2938
3054
 
2939
3055
  ms_refcol = pd.DataFrame(df.groupby(ref_col)[turnover_col].sum())
2940
3056
  ms_refcol = ms_refcol.rename(columns = {turnover_col: "total"})
@@ -2982,7 +3098,7 @@ def log_centering_transformation(
2982
3098
  )
2983
3099
 
2984
3100
  if ref_col not in df.columns:
2985
- raise KeyError(f"Column '{ref_col}' not in dataframe.")
3101
+ raise KeyError(f"Error in log-centering transformation: Column '{ref_col}' not in dataframe.")
2986
3102
 
2987
3103
  def lct (x):
2988
3104
 
@@ -3110,17 +3226,20 @@ def modelfit(
3110
3226
  expected_no = len(expected)
3111
3227
 
3112
3228
  if not observed_no == expected_no:
3113
- raise ValueError("Observed and expected differ in length")
3229
+ raise ValueError("Error while calculating fit metrics: Observed and expected differ in length")
3114
3230
 
3115
3231
  if not isinstance(observed, np.number):
3116
3232
  if not is_numeric_dtype(observed):
3117
- raise ValueError("Observed column is not numeric")
3233
+ raise ValueError("Error while calculating fit metrics: Observed column is not numeric")
3118
3234
  if not isinstance(expected, np.number):
3119
3235
  if not is_numeric_dtype(expected):
3120
- raise ValueError("Expected column is not numeric")
3236
+ raise ValueError("Error while calculating fit metrics: Expected column is not numeric")
3121
3237
 
3122
3238
  if remove_nan:
3123
3239
 
3240
+ observed = observed.reset_index(drop=True)
3241
+ expected = expected.reset_index(drop=True)
3242
+
3124
3243
  obs_exp = pd.DataFrame(
3125
3244
  {
3126
3245
  "observed": observed,
@@ -3140,9 +3259,9 @@ def modelfit(
3140
3259
  else:
3141
3260
 
3142
3261
  if np.isnan(observed).any():
3143
- raise ValueError("Vector with observed data contains NaN")
3262
+ raise ValueError("Error while calculating fit metrics: Vector with observed data contains NaN and 'remove_nan' is False")
3144
3263
  if np.isnan(expected).any():
3145
- raise ValueError("Vector with expected data contains NaN")
3264
+ raise ValueError("Error while calculating fit metrics: Vector with expected data contains NaN and 'remove_nan' is False")
3146
3265
 
3147
3266
  residuals = np.array(observed)-np.array(expected)
3148
3267
  residuals_sq = residuals**2
huff/tests/tests_huff.py CHANGED
@@ -4,12 +4,13 @@
4
4
  # Author: Thomas Wieland
5
5
  # ORCID: 0000-0001-5168-9846
6
6
  # mail: geowieland@googlemail.com
7
- # Version: 1.5.2
8
- # Last update: 2025-07-02 21:10
7
+ # Version: 1.5.3
8
+ # Last update: 2025-07-15 17:22
9
9
  # Copyright (c) 2025 Thomas Wieland
10
10
  #-----------------------------------------------------------------------
11
11
 
12
12
  from huff.models import create_interaction_matrix, get_isochrones, load_geodata, load_interaction_matrix, load_marketareas, market_shares, modelfit
13
+ from huff.models import HuffModel
13
14
  from huff.osm import map_with_basemap
14
15
  from huff.gistools import buffers, point_spatial_join
15
16
 
@@ -140,6 +141,10 @@ huff_model_fit = haslach_interactionmatrix.marketareas()
140
141
  # Calculcation of total market areas
141
142
  # Result of class HuffModel
142
143
 
144
+ bootstrap_cis = huff_model_fit.confint(repeats=10)
145
+ print(bootstrap_cis)
146
+ # Confidence intervals for estimated parameters
147
+
143
148
  huff_model_fit.summary()
144
149
  # Huff model summary
145
150
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: huff
3
- Version: 1.5.2
3
+ Version: 1.5.3
4
4
  Summary: huff: Huff Model Market Area Analysis
5
5
  Author: Thomas Wieland
6
6
  Author-email: geowieland@googlemail.com
@@ -18,7 +18,7 @@ Requires-Dist: openpyxl
18
18
 
19
19
  # huff: Huff Model Market Area Analysis
20
20
 
21
- This Python library is designed for performing market area analyses with the Huff Model (Huff 1962, 1964) and/or the Multiplicative Competitive Interaction (MCI) Model (Nakanishi and Cooper 1974, 1982). Users may load point shapefiles (or CSV, XLSX) of customer origins and supply locations and conduct a market area analysis step by step. The package also includes supplementary GIS functions, including clients for OpenRouteService(1) for network analysis (e.g., transport cost matrix) and OpenStreetMap(2) for simple maps. See Huff and McCallum (2008) or Wieland (2017) for a description of the models and their practical application.
21
+ This Python library is designed for performing market area analyses with the Huff Model (Huff 1962, 1964) and/or the Multiplicative Competitive Interaction (MCI) Model (Nakanishi and Cooper 1974, 1982). Users may load point shapefiles (or CSV, XLSX) of customer origins and supply locations and conduct a market area analysis step by step. The library supports parameter estimation based on empirical customer data using the MCI model and Maximum Likelihood. The package also includes supplementary GIS functions, including clients for OpenRouteService(1) for network analysis (e.g., transport cost matrix) and OpenStreetMap(2) for simple maps. See Huff and McCallum (2008) or Wieland (2017) for a description of the models and their practical application.
22
22
 
23
23
 
24
24
  ## Author
@@ -28,10 +28,12 @@ Thomas Wieland [ORCID](https://orcid.org/0000-0001-5168-9846) [EMail](mailto:geo
28
28
  See the /tests directory for usage examples of most of the included functions.
29
29
 
30
30
 
31
- ## Updates v1.5.2
31
+ ## Updates v1.5.3
32
32
  - Bugfixes:
33
- - HuffModel.ml_fit(): Correct values of expected T_j, corrected calculation of model fit metrices when fit_by="totals"
34
- - HuffModel.ml_fit(): Check if sum of E_ij != sum of T_j
33
+ - InteractionMatrix.summary() and HuffModel.summary(): No KeyError when param is None anymore
34
+ - Extensions:
35
+ - Confidence intervals (bootstrap) for Huff Model ML estimations
36
+ - ValueError and KeyError are more precise (function is included)
35
37
 
36
38
 
37
39
  ## Features
@@ -1,10 +1,10 @@
1
1
  huff/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
2
2
  huff/gistools.py,sha256=fgeE1IsUO7UIaawb23kuiz_Rlxn7T18iLLTA5yvgp74,7038
3
- huff/models.py,sha256=e8aILi45qcJ9tvHJfKIFKWfD-DYXjZQ0gXOS4MpG7Ks,125430
3
+ huff/models.py,sha256=mPASlL0YA8x-cnhoRgrpr1sP-p5gGg1_cwM-QGf8GfU,133310
4
4
  huff/ors.py,sha256=JlO2UEishQX87PIiktksOrVT5QdB-GEWgjXcxoR_KuA,11929
5
5
  huff/osm.py,sha256=9A-7hxeZyjA2r8w2_IqqwH14qq2Y9AS1GxVKOD7utqs,7747
6
6
  huff/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
7
- huff/tests/tests_huff.py,sha256=xHCR087rqLNWDFfZhi1giKDzffCx3IemWQmHrAUYxFw,12956
7
+ huff/tests/tests_huff.py,sha256=FMnkSs7id4KcJei71DRLNPbY7cvOVjALwYzcBKCm1Ao,13116
8
8
  huff/tests/data/Haslach.cpg,sha256=OtMDH1UDpEBK-CUmLugjLMBNTqZoPULF3QovKiesmCQ,5
9
9
  huff/tests/data/Haslach.dbf,sha256=GVPIt05OzDO7UrRDcsMhiYWvyXAPg6Z-qkiysFzj-fc,506
10
10
  huff/tests/data/Haslach.prj,sha256=2Jy1Vlzh7UxQ1MXpZ9UYLs2SxfrObj2xkEkZyLqmGTY,437
@@ -24,7 +24,7 @@ huff/tests/data/Haslach_supermarkets.qmd,sha256=JlcOYzG4vI1NH1IuOpxwIPnJsCyC-pDR
24
24
  huff/tests/data/Haslach_supermarkets.shp,sha256=X7QbQ0BTMag_B-bDRbpr-go2BQIXo3Y8zMAKpYZmlps,324
25
25
  huff/tests/data/Haslach_supermarkets.shx,sha256=j23QHX-SmdAeN04rw0x8nUOran-OCg_T6r_LvzzEPWs,164
26
26
  huff/tests/data/Wieland2015.xlsx,sha256=H4rxCFlctn44-O6mIyeFf67FlgvznLX7xZqpoWYS41A,25788
27
- huff-1.5.2.dist-info/METADATA,sha256=XnlmcfscK8c1P3EN40W8JcQnFE7AkWDT4NqLR9skTIY,5956
28
- huff-1.5.2.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
29
- huff-1.5.2.dist-info/top_level.txt,sha256=nlzX-PxZNFmIxANIJMySuIFPihd6qOBkRlhIC28NEsQ,5
30
- huff-1.5.2.dist-info/RECORD,,
27
+ huff-1.5.3.dist-info/METADATA,sha256=vqlH9tlDz5pHu85fYu1Bnprk9yiWzAPRY94rQTQxFGM,6142
28
+ huff-1.5.3.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
29
+ huff-1.5.3.dist-info/top_level.txt,sha256=nlzX-PxZNFmIxANIJMySuIFPihd6qOBkRlhIC28NEsQ,5
30
+ huff-1.5.3.dist-info/RECORD,,
File without changes