huff 1.3.4__py3-none-any.whl → 1.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
huff/gistools.py CHANGED
@@ -4,8 +4,8 @@
4
4
  # Author: Thomas Wieland
5
5
  # ORCID: 0000-0001-5168-9846
6
6
  # mail: geowieland@googlemail.com
7
- # Version: 1.3.4
8
- # Last update: 2025-06-02 17:17
7
+ # Version: 1.4.0
8
+ # Last update: 2025-06-10 17:16
9
9
  # Copyright (c) 2025 Thomas Wieland
10
10
  #-----------------------------------------------------------------------
11
11
 
@@ -168,8 +168,13 @@ def point_spatial_join(
168
168
  point_stat_col: str = None
169
169
  ):
170
170
 
171
+ if polygon_gdf is None:
172
+ raise ValueError("Parameter 'polygon_gdf' is None")
173
+ if point_gdf is None:
174
+ raise ValueError("Parameter 'point_gdf' is None")
175
+
171
176
  if polygon_gdf.crs != point_gdf.crs:
172
- raise ValueError (f"Coordinate reference systems of polygon and point data do not match. Polygons: {str(polygon_gdf.crs)}, points: {str(point_gdf.crs)}")
177
+ raise ValueError(f"Coordinate reference systems of polygon and point data do not match. Polygons: {str(polygon_gdf.crs)}, points: {str(point_gdf.crs)}")
173
178
 
174
179
  if polygon_ref_cols != []:
175
180
  for polygon_ref_col in polygon_ref_cols:
huff/models.py CHANGED
@@ -4,8 +4,8 @@
4
4
  # Author: Thomas Wieland
5
5
  # ORCID: 0000-0001-5168-9846
6
6
  # mail: geowieland@googlemail.com
7
- # Version: 1.3.4
8
- # Last update: 2025-06-02 17:17
7
+ # Version: 1.4.0
8
+ # Last update: 2025-06-10 17:17
9
9
  # Copyright (c) 2025 Thomas Wieland
10
10
  #-----------------------------------------------------------------------
11
11
 
@@ -17,8 +17,10 @@ from math import sqrt
17
17
  import time
18
18
  from pandas.api.types import is_numeric_dtype
19
19
  from statsmodels.formula.api import ols
20
+ from scipy.optimize import minimize
20
21
  from shapely.geometry import Point
21
22
  from shapely import wkt
23
+ import copy
22
24
  from huff.ors import Client, TimeDistanceMatrix, Isochrone
23
25
  from huff.gistools import overlay_difference, distance_matrix, buffers
24
26
 
@@ -73,10 +75,12 @@ class CustomerOrigins:
73
75
  print("Market size column " + metadata["marketsize_col"])
74
76
 
75
77
  if metadata["weighting"][0]["func"] is None and metadata["weighting"][0]["param"] is None:
76
- print("Transport cost weighting not defined")
77
- else:
78
- print("Transport cost weighting " + metadata["weighting"][0]["func"] + " with lambda = " + str(metadata["weighting"][0]["param"]))
79
-
78
+ print("Transport cost weighting not defined")
79
+ elif metadata["weighting"][0]["func"] in ["power", "exponential"]:
80
+ print("Transport cost weighting " + str(metadata["weighting"][0]["param"]) + " (" + metadata["weighting"][0]["func"] + ")")
81
+ elif metadata["weighting"][0]["func"] == "logistic":
82
+ print("Transport cost weighting " + str(metadata["weighting"][0]["param"][0]) + ", " + str(metadata["weighting"][0]["param"][1]) + " (" + metadata["weighting"][0]["func"] + ")")
83
+
80
84
  print("Unique ID column " + metadata["unique_id"])
81
85
  print("Input CRS " + str(metadata["crs_input"]))
82
86
 
@@ -117,11 +121,24 @@ class CustomerOrigins:
117
121
 
118
122
  metadata = self.metadata
119
123
 
124
+ if func not in ["power", "exponential", "logistic"]:
125
+ raise ValueError("Parameter 'func' must be 'power', 'exponential' or 'logistic'")
126
+
127
+ if isinstance(param_lambda, list) and func != "logistic":
128
+ raise ValueError("Function type "+ func + " requires one single parameter value")
129
+
130
+ if isinstance(param_lambda, (int, float)) and func == "logistic":
131
+ raise ValueError("Function type "+ func + " requires two parameters in a list")
132
+
120
133
  metadata["weighting"][0]["func"] = func
121
- metadata["weighting"][0]["param"] = param_lambda
122
134
 
123
- self.metadata = metadata
135
+ if isinstance(param_lambda, list):
136
+ metadata["weighting"][0]["param"] = [float(param_lambda[0]), float(param_lambda[1])]
137
+ else:
138
+ metadata["weighting"][0]["param"] = float(param_lambda)
124
139
 
140
+ self.metadata = metadata
141
+
125
142
  return self
126
143
 
127
144
  def isochrones(
@@ -284,7 +301,7 @@ class SupplyLocations:
284
301
  raise ValueError ("Attraction column is not yet defined. Use SupplyLocations.define_attraction()")
285
302
 
286
303
  metadata["weighting"][0]["func"] = func
287
- metadata["weighting"][0]["param"] = param_gamma
304
+ metadata["weighting"][0]["param"] = float(param_gamma)
288
305
  self.metadata = metadata
289
306
 
290
307
  return self
@@ -464,10 +481,14 @@ class InteractionMatrix:
464
481
  print("Attraction not defined")
465
482
  else:
466
483
  print("Attraction " + str(supply_locations_metadata["weighting"][0]["param"]) + " (" + supply_locations_metadata["weighting"][0]["func"] + ")")
484
+
467
485
  if customer_origins_metadata["weighting"][0]["func"] is None and customer_origins_metadata["weighting"][0]["param"] is None:
468
486
  print("Transport costs not defined")
469
- else:
470
- print("Transport costs " + str(customer_origins_metadata["weighting"][0]["param"]) + " (" + customer_origins_metadata["weighting"][0]["func"] + ")")
487
+ elif customer_origins_metadata["weighting"][0]["func"] in ["power", "exponential"]:
488
+ print("Transport costs " + str(customer_origins_metadata["weighting"][0]["param"]) + " (" + customer_origins_metadata["weighting"][0]["func"] + ")")
489
+ elif customer_origins_metadata["weighting"][0]["func"] == "logistic":
490
+ print("Transport costs " + str(customer_origins_metadata["weighting"][0]["param"][0]) + ", " + str(customer_origins_metadata["weighting"][0]["param"][1]) + " (" + customer_origins_metadata["weighting"][0]["func"] + ")")
491
+
471
492
  print("----------------------------------")
472
493
 
473
494
  def transport_costs(
@@ -592,6 +613,8 @@ class InteractionMatrix:
592
613
  interaction_matrix_df["t_ij_weighted"] = interaction_matrix_df["t_ij"] ** tc_weighting["param"]
593
614
  elif tc_weighting["func"] == "exponential":
594
615
  interaction_matrix_df["t_ij_weighted"] = np.exp(tc_weighting["param"] * interaction_matrix_df['t_ij'])
616
+ elif tc_weighting["func"] == "logistic":
617
+ interaction_matrix_df["t_ij_weighted"] = 1+np.exp(tc_weighting["param"][0] + tc_weighting["param"][1] * interaction_matrix_df['t_ij'])
595
618
  else:
596
619
  raise ValueError ("Transport costs weighting is not defined.")
597
620
 
@@ -681,7 +704,6 @@ class InteractionMatrix:
681
704
 
682
705
  return huff_model
683
706
 
684
-
685
707
  def hansen(
686
708
  self,
687
709
  from_origins: bool = True
@@ -689,16 +711,34 @@ class InteractionMatrix:
689
711
 
690
712
  interaction_matrix_df = self.interaction_matrix_df
691
713
 
692
- if interaction_matrix_df["U_ij"].isna().all():
693
- self.utility()
694
- interaction_matrix_df = self.interaction_matrix_df
695
-
696
714
  if from_origins:
715
+
716
+ if interaction_matrix_df["U_ij"].isna().all():
717
+ self.utility()
718
+ interaction_matrix_df = self.interaction_matrix_df
719
+
697
720
  hansen_df = pd.DataFrame(interaction_matrix_df.groupby("i")["U_ij"].sum()).reset_index()
698
721
  hansen_df = hansen_df.rename(columns = {"U_ij": "A_i"})
722
+
699
723
  else:
700
- hansen_df = pd.DataFrame(interaction_matrix_df.groupby("j")["U_ij"].sum()).reset_index()
701
- hansen_df = hansen_df.rename(columns = {"U_ij": "A_j"})
724
+
725
+ if "C_i" not in interaction_matrix_df.columns or interaction_matrix_df["C_i"].isna().all():
726
+ raise ValueError("Customer origins market size is not available")
727
+
728
+ customer_origins_metadata = self.customer_origins.get_metadata()
729
+ tc_weighting = customer_origins_metadata["weighting"][0]
730
+ if tc_weighting["func"] == "power":
731
+ interaction_matrix_df["t_ij_weighted"] = interaction_matrix_df["t_ij"] ** tc_weighting["param"]
732
+ elif tc_weighting["func"] == "exponential":
733
+ interaction_matrix_df["t_ij_weighted"] = np.exp(tc_weighting["param"] * interaction_matrix_df['t_ij'])
734
+ elif tc_weighting["func"] == "logistic":
735
+ interaction_matrix_df["t_ij_weighted"] = 1+np.exp(tc_weighting["param"][0] + tc_weighting["param"][1] * interaction_matrix_df['t_ij'])
736
+ else:
737
+ raise ValueError ("Transport costs weighting is not defined.")
738
+
739
+ interaction_matrix_df["U_ji"] = interaction_matrix_df["C_i"]*interaction_matrix_df["t_ij_weighted"]
740
+ hansen_df = pd.DataFrame(interaction_matrix_df.groupby("j")["U_ji"].sum()).reset_index()
741
+ hansen_df = hansen_df.rename(columns = {"U_ji": "A_j"})
702
742
 
703
743
  return hansen_df
704
744
 
@@ -857,16 +897,22 @@ class HuffModel:
857
897
  else:
858
898
  print("Market size column " + customer_origins_metadata["marketsize_col"])
859
899
  print("----------------------------------")
900
+
860
901
  print("Partial utilities")
861
902
  print(" Weights")
903
+
862
904
  if supply_locations_metadata["weighting"][0]["func"] is None and supply_locations_metadata["weighting"][0]["param"] is None:
863
905
  print("Attraction not defined")
864
906
  else:
865
907
  print("Attraction " + str(supply_locations_metadata["weighting"][0]["param"]) + " (" + supply_locations_metadata["weighting"][0]["func"] + ")")
908
+
866
909
  if customer_origins_metadata["weighting"][0]["func"] is None and customer_origins_metadata["weighting"][0]["param"] is None:
867
910
  print("Transport costs not defined")
868
- else:
869
- print("Transport costs " + str(customer_origins_metadata["weighting"][0]["param"]) + " (" + customer_origins_metadata["weighting"][0]["func"] + ")")
911
+ elif customer_origins_metadata["weighting"][0]["func"] in ["power", "exponential"]:
912
+ print("Transport costs " + str(customer_origins_metadata["weighting"][0]["param"]) + " (" + customer_origins_metadata["weighting"][0]["func"] + ")")
913
+ elif customer_origins_metadata["weighting"][0]["func"] == "logistic":
914
+ print("Transport costs " + str(customer_origins_metadata["weighting"][0]["param"][0]) + ", " + str(customer_origins_metadata["weighting"][0]["param"][1]) + " (" + customer_origins_metadata["weighting"][0]["func"] + ")")
915
+
870
916
  print("----------------------------------")
871
917
 
872
918
  def mci_fit(
@@ -876,15 +922,14 @@ class HuffModel:
876
922
  ):
877
923
 
878
924
  interaction_matrix = self.interaction_matrix
925
+ interaction_matrix_df = interaction_matrix.get_interaction_matrix_df()
879
926
 
880
927
  supply_locations = interaction_matrix.get_supply_locations()
881
928
  supply_locations_metadata = supply_locations.get_metadata()
882
929
 
883
930
  customer_origins = interaction_matrix.get_customer_origins()
884
931
  customer_origins_metadata = customer_origins.get_metadata()
885
-
886
- interaction_matrix_df = interaction_matrix.get_interaction_matrix_df()
887
-
932
+
888
933
  cols_t = [col + "__LCT" for col in cols]
889
934
 
890
935
  if "p_ij__LCT" not in interaction_matrix_df.columns:
@@ -949,6 +994,131 @@ class HuffModel:
949
994
 
950
995
  return mci_model
951
996
 
997
+ def huff_loglik(
998
+ self,
999
+ params
1000
+ ):
1001
+
1002
+ if not isinstance(params, list):
1003
+ if isinstance(params, np.ndarray):
1004
+ params = params.tolist()
1005
+ else:
1006
+ raise ValueError("Parameter 'params' must be a list or np.ndarray with two or three parameter values")
1007
+
1008
+ if len(params) == 2:
1009
+ param_gamma, param_lambda = params
1010
+ elif len(params) == 3:
1011
+ param_gamma, param_lambda, param_lambda2 = params
1012
+ else:
1013
+ raise ValueError("Parameter 'params' must be a list with two or three parameter values")
1014
+
1015
+ interaction_matrix = self.interaction_matrix
1016
+ interaction_matrix_df = interaction_matrix.get_interaction_matrix_df()
1017
+
1018
+ supply_locations = interaction_matrix.get_supply_locations()
1019
+ supply_locations_metadata = supply_locations.get_metadata()
1020
+
1021
+ customer_origins = interaction_matrix.get_customer_origins()
1022
+ customer_origins_metadata = customer_origins.get_metadata()
1023
+
1024
+ supply_locations_metadata["weighting"][0]["param"] = float(param_gamma)
1025
+ supply_locations.metadata = supply_locations_metadata
1026
+
1027
+ if customer_origins_metadata["weighting"][0]["func"] in ["power", "exponential"]:
1028
+
1029
+ if len(params) == 2:
1030
+ customer_origins_metadata["weighting"][0]["param"] = float(param_lambda)
1031
+ else:
1032
+ raise ValueError ("Huff Model with transport cost weighting of type " + customer_origins_metadata["weighting"]["func"] + " must have two input parameters")
1033
+
1034
+ elif customer_origins_metadata["weighting"][0]["func"] == "logistic":
1035
+
1036
+ if len(params) == 3:
1037
+ customer_origins_metadata["weighting"][0]["param"] = [float(param_lambda), float(param_lambda2)]
1038
+ else:
1039
+ raise ValueError("Huff Model with transport cost weighting of type " + customer_origins_metadata["weighting"]["func"] + " must have three input parameters")
1040
+
1041
+ customer_origins.metadata = customer_origins_metadata
1042
+
1043
+ interaction_matrix = self.interaction_matrix
1044
+ interaction_matrix_df = interaction_matrix.get_interaction_matrix_df()
1045
+
1046
+ p_ij_emp = interaction_matrix_df["p_ij"]
1047
+
1048
+ interaction_matrix_copy = copy.deepcopy(interaction_matrix)
1049
+
1050
+ interaction_matrix_copy.utility()
1051
+ interaction_matrix_copy.probabilities()
1052
+
1053
+ interaction_matrix_df_copy = interaction_matrix_copy.get_interaction_matrix_df()
1054
+ p_ij = interaction_matrix_df_copy["p_ij"]
1055
+
1056
+ LL = loglik(
1057
+ observed = p_ij_emp,
1058
+ expected = p_ij
1059
+ )
1060
+
1061
+ return LL
1062
+
1063
+ def ml_fit(
1064
+ self,
1065
+ initial_params = [1.0, -2.0],
1066
+ bounds = [(0.5, 1), (-3, -1)],
1067
+ method = "L-BFGS-B"
1068
+ ):
1069
+
1070
+ if len(initial_params) > 3 or len(initial_params) < 2:
1071
+ raise ValueError("Parameter 'initial_params' must be a list with two or three entries")
1072
+
1073
+ if len(bounds) != len(initial_params):
1074
+ raise ValueError("Parameter 'bounds' must have the same length as parameter 'initial_params' (" + str(len(bounds)) + ", " + str(len(initial_params)) + ")")
1075
+
1076
+ ml_result = minimize(
1077
+ self.huff_loglik,
1078
+ initial_params,
1079
+ method = method,
1080
+ bounds = bounds,
1081
+ options={'disp': 3}
1082
+ )
1083
+
1084
+ if ml_result.success:
1085
+
1086
+ fitted_params = ml_result.x
1087
+
1088
+ if len(initial_params) == 2:
1089
+
1090
+ param_gamma = fitted_params[0]
1091
+ param_lambda = fitted_params[1]
1092
+ param_results = [
1093
+ float(param_gamma),
1094
+ float(param_lambda)
1095
+ ]
1096
+
1097
+ elif len (initial_params) == 3:
1098
+
1099
+ param_gamma = fitted_params[0]
1100
+ param_lambda = fitted_params[1]
1101
+ param_lambda2 = fitted_params[2]
1102
+ param_results = [
1103
+ float(param_gamma),
1104
+ float(param_lambda),
1105
+ float(param_lambda2)
1106
+ ]
1107
+
1108
+ else:
1109
+
1110
+ param_gamma = None
1111
+ param_lambda = None
1112
+ param_results = [param_gamma, param_lambda]
1113
+
1114
+ if len(initial_params) == 3:
1115
+ param_lambda2 = None
1116
+ param_results.append(param_lambda2)
1117
+
1118
+ print(f"Optimization via {method} algorithm failed with error message: '{ml_result.message}'. See https://docs.scipy.org/doc/scipy/reference/generated/scipy.optimize.minimize.html for all available algorithms.")
1119
+
1120
+ return param_results
1121
+
952
1122
 
953
1123
  class MCIModel:
954
1124
 
@@ -1348,6 +1518,10 @@ def create_interaction_matrix(
1348
1518
 
1349
1519
  customer_origins_geodata_gpd = pd.DataFrame(customer_origins.get_geodata_gpd())
1350
1520
  customer_origins_geodata_gpd_original = pd.DataFrame(customer_origins.get_geodata_gpd_original())
1521
+
1522
+ customer_origins_geodata_gpd[customer_origins_unique_id] = customer_origins_geodata_gpd[customer_origins_unique_id].astype(str)
1523
+ customer_origins_geodata_gpd_original[customer_origins_unique_id] = customer_origins_geodata_gpd_original[customer_origins_unique_id].astype(str)
1524
+
1351
1525
  customer_origins_data = pd.merge(
1352
1526
  customer_origins_geodata_gpd,
1353
1527
  customer_origins_geodata_gpd_original[[customer_origins_unique_id, customer_origins_marketsize]],
@@ -1777,6 +1951,7 @@ def modelfit(
1777
1951
 
1778
1952
  SQR = float(np.sum(residuals_sq))
1779
1953
  SAR = float(np.sum(residuals_abs))
1954
+ LL = float(np.sum(np.log(residuals_sq)))
1780
1955
  observed_mean = float(np.sum(observed)/observed_no)
1781
1956
  SQT = float(np.sum((observed-observed_mean)**2))
1782
1957
  Rsq = float(1-(SQR/SQT))
@@ -1816,6 +1991,20 @@ def modelfit(
1816
1991
 
1817
1992
  return modelfit_results
1818
1993
 
1994
+ def loglik(
1995
+ observed,
1996
+ expected
1997
+ ):
1998
+
1999
+ model_fit = modelfit(
2000
+ observed,
2001
+ expected
2002
+ )
2003
+ residuals_sq = model_fit[0]["residuals_sq"]
2004
+
2005
+ LL = np.sum(np.log(residuals_sq))
2006
+
2007
+ return -LL
1819
2008
 
1820
2009
  def check_vars(
1821
2010
  df: pd.DataFrame,
huff/ors.py CHANGED
@@ -4,8 +4,8 @@
4
4
  # Author: Thomas Wieland
5
5
  # ORCID: 0000-0001-5168-9846
6
6
  # mail: geowieland@googlemail.com
7
- # Version: 1.3.4
8
- # Last update: 2025-06-01 17:18
7
+ # Version: 1.4.0
8
+ # Last update: 2025-06-10 17:17
9
9
  # Copyright (c) 2025 Thomas Wieland
10
10
  #-----------------------------------------------------------------------
11
11
 
huff/osm.py CHANGED
@@ -4,8 +4,8 @@
4
4
  # Author: Thomas Wieland
5
5
  # ORCID: 0000-0001-5168-9846
6
6
  # mail: geowieland@googlemail.com
7
- # Version: 1.3.4
8
- # Last update: 2025-06-02 17:18
7
+ # Version: 1.4.0
8
+ # Last update: 2025-06-10 17:17
9
9
  # Copyright (c) 2025 Thomas Wieland
10
10
  #-----------------------------------------------------------------------
11
11
 
@@ -19,6 +19,7 @@ import time
19
19
  import os
20
20
  from PIL import Image
21
21
  import matplotlib.pyplot as plt
22
+ from matplotlib.patches import Patch
22
23
  import contextily as cx
23
24
  from shapely.geometry import box
24
25
 
@@ -142,108 +143,124 @@ def map_with_basemap(
142
143
  styles: dict = {},
143
144
  save_output: bool = True,
144
145
  output_filepath: str = "osm_map_with_basemap.png",
145
- output_dpi = 300
146
- ):
147
-
146
+ output_dpi=300,
147
+ legend: bool = True
148
+ ):
148
149
  if not layers:
149
150
  raise ValueError("List layers is empty")
150
151
 
151
152
  combined = gpd.GeoDataFrame(
152
- pd.concat(
153
- layers,
154
- ignore_index = True
155
- ),
156
- crs = layers[0].crs
153
+ pd.concat(layers, ignore_index=True),
154
+ crs=layers[0].crs
157
155
  )
158
156
 
159
157
  combined_wgs84 = combined.to_crs(epsg=4326)
160
158
  bounds = combined_wgs84.total_bounds
161
159
 
162
160
  sw_lon, sw_lat, ne_lon, ne_lat = bounds[0]*0.9999, bounds[1]*0.9999, bounds[2]*1.0001, bounds[3]*1.0001
163
-
164
- if osm_basemap:
165
161
 
162
+ if osm_basemap:
166
163
  get_basemap(sw_lat, sw_lon, ne_lat, ne_lon, zoom=zoom)
167
164
 
168
165
  fig, ax = plt.subplots(figsize=(10, 10))
169
166
 
170
167
  if osm_basemap:
171
-
172
168
  img = Image.open("osm_map.png")
173
169
  extent_img = [sw_lon, ne_lon, sw_lat, ne_lat]
174
170
  ax.imshow(img, extent=extent_img, origin="upper")
175
171
 
176
172
  i = 0
173
+ legend_handles = []
174
+
177
175
  for layer in layers:
178
-
179
176
  layer_3857 = layer.to_crs(epsg=3857)
180
-
177
+
181
178
  if styles != {}:
182
-
183
179
  layer_style = styles[i]
184
180
  layer_color = layer_style["color"]
185
181
  layer_alpha = layer_style["alpha"]
186
-
182
+ layer_name = layer_style["name"]
183
+
187
184
  if isinstance(layer_color, str):
188
-
189
185
  layer_3857.plot(
190
186
  ax=ax,
191
187
  color=layer_color,
192
- alpha=layer_alpha
193
- )
194
-
188
+ alpha=layer_alpha,
189
+ label=layer_name
190
+ )
191
+ if legend:
192
+ patch = Patch(
193
+ facecolor=layer_color,
194
+ alpha=layer_alpha,
195
+ label=layer_name
196
+ )
197
+ legend_handles.append(patch)
198
+
195
199
  elif isinstance(layer_color, dict):
196
-
197
200
  color_key = list(layer_color.keys())[0]
198
201
  color_mapping = layer_color[color_key]
199
-
202
+
200
203
  if color_key not in layer_3857.columns:
201
- raise KeyError ("Column " + color_key + " not in layer.")
202
-
204
+ raise KeyError("Column " + color_key + " not in layer.")
205
+
203
206
  for value, color in color_mapping.items():
204
207
 
205
208
  subset = layer_3857[layer_3857[color_key].astype(str) == str(value)]
206
209
 
207
210
  if not subset.empty:
211
+
208
212
  subset.plot(
209
213
  ax=ax,
210
214
  color=color,
211
- alpha=layer_alpha
215
+ alpha=layer_alpha,
216
+ label=str(value)
212
217
  )
213
-
218
+
219
+ if legend:
220
+ patch = Patch(facecolor=color, alpha=layer_alpha, label=str(value))
221
+ legend_handles.append(patch)
222
+
214
223
  else:
215
-
216
- layer_3857.plot(
217
- ax=ax,
218
- alpha=0.6
219
- )
220
224
 
221
- i = i+1
225
+ layer_3857.plot(ax=ax, alpha=0.6, label=f"Layer {i+1}")
226
+
227
+ if legend:
228
+
229
+ patch = Patch(
230
+ facecolor="gray",
231
+ alpha=0.6,
232
+ label=f"Layer {i+1}"
233
+ )
234
+
235
+ legend_handles.append(patch)
236
+
237
+ i += 1
222
238
 
223
239
  bbox = box(sw_lon, sw_lat, ne_lon, ne_lat)
224
240
  extent_geom = gpd.GeoSeries([bbox], crs=4326).to_crs(epsg=3857).total_bounds
225
-
226
241
  ax.set_xlim(extent_geom[0], extent_geom[2])
227
242
  ax.set_ylim(extent_geom[1], extent_geom[3])
228
243
 
229
244
  if osm_basemap:
230
245
  cx.add_basemap(
231
- ax,
232
- source=cx.providers.OpenStreetMap.Mapnik,
246
+ ax,
247
+ source=cx.providers.OpenStreetMap.Mapnik,
233
248
  zoom=zoom
234
249
  )
235
250
 
236
251
  plt.axis('off')
252
+
253
+ if legend and legend_handles:
254
+ ax.legend(handles=legend_handles, loc='lower right', fontsize='small', frameon=True)
255
+
237
256
  plt.show()
238
-
257
+
239
258
  if save_output:
240
-
241
259
  plt.savefig(
242
- output_filepath,
243
- dpi = output_dpi,
260
+ output_filepath,
261
+ dpi=output_dpi,
244
262
  bbox_inches="tight"
245
- )
246
-
263
+ )
247
264
  plt.close()
248
265
 
249
266
  if os.path.exists("osm_map.png"):
huff/tests/tests_huff.py CHANGED
@@ -4,8 +4,8 @@
4
4
  # Author: Thomas Wieland
5
5
  # ORCID: 0000-0001-5168-9846
6
6
  # mail: geowieland@googlemail.com
7
- # Version: 1.3.4
8
- # Last update: 2025-06-02 17:17
7
+ # Version: 1.4.0
8
+ # Last update: 2025-06-10 17:16
9
9
  # Copyright (c) 2025 Thomas Wieland
10
10
  #-----------------------------------------------------------------------
11
11
 
@@ -39,7 +39,11 @@ Haslach.define_marketsize("pop")
39
39
  # Definition of market size variable
40
40
 
41
41
  Haslach.define_transportcosts_weighting(
42
- param_lambda = -2.2
42
+ #param_lambda = -2.2,
43
+ # one weighting parameter for power function (default)
44
+ param_lambda = [10, -0.5],
45
+ func="logistic"
46
+ # two weighting parameters for logistic function
43
47
  )
44
48
  # Definition of transport costs weighting (lambda)
45
49
 
@@ -96,8 +100,9 @@ haslach_interactionmatrix = create_interaction_matrix(
96
100
  # Creating interaction matrix
97
101
 
98
102
  interaction_matrix = haslach_interactionmatrix.transport_costs(
99
- ors_auth="5b3ce3597851110001cf62480a15aafdb5a64f4d91805929f8af6abd"
100
- #network=False
103
+ #ors_auth="5b3ce3597851110001cf62480a15aafdb5a64f4d91805929f8af6abd"
104
+ network=False
105
+ # set network = True to calculate transport costs matrix via ORS API (default)
101
106
  )
102
107
  # Obtaining transport costs (default: driving-car)
103
108
  # ORS API documentation: https://openrouteservice.org/dev/#/api-docs/v2/
@@ -111,19 +116,20 @@ print(interaction_matrix.hansen())
111
116
  interaction_matrix = interaction_matrix.flows()
112
117
  # Calculating spatial flows for interaction matrix
113
118
 
114
- huff_model = interaction_matrix.marketareas()
115
- # Calculating total market areas for interaction matrix
116
- # Result of class HuffModel
117
-
118
- interaction_matrix = interaction_matrix.flows()
119
- # Calculating spatial flows
120
-
121
119
  huff_model = interaction_matrix.marketareas()
122
120
  # Calculating total market areas
121
+ # Result of class HuffModel
123
122
 
124
123
  huff_model.summary()
125
124
  # Summary of Huff model
126
125
 
126
+ huff_model_mlfit = huff_model.ml_fit(
127
+ initial_params=[1, 10, -0.5],
128
+ bounds = [(0, 1), (7, 12), (-0.7, -0.1)],
129
+ )
130
+ print(huff_model_mlfit)
131
+ # Maximum Likelihood fit for Huff Model
132
+
127
133
  print(huff_model.get_market_areas_df())
128
134
  # Showing total market areas
129
135
 
@@ -246,30 +252,30 @@ map_with_basemap(
246
252
  Haslach_supermarkets_gdf
247
253
  ],
248
254
  styles={
249
- 0: {
255
+ 0: {"name": "Isochrones",
250
256
  "color": {
251
257
  "segm_min": {
252
- "3": "green",
253
- "6": "yellow",
254
- "9": "orange",
255
- "12": "red",
256
- "13": "darkred"
258
+ "3": "midnightblue",
259
+ "6": "blue",
260
+ "9": "dodgerblue",
261
+ "12": "deepskyblue",
262
+ "15": "aqua"
257
263
  }
258
264
  },
259
265
  "alpha": 0.3
260
266
  },
261
- 1: {
262
- "color": "green",
267
+ 1: {"name": "Districts",
268
+ "color": "black",
263
269
  "alpha": 1
264
270
  },
265
- 2: {
271
+ 2: {"name": "Supermarket chains",
266
272
  "color": {
267
273
  "Name": {
268
274
  "Aldi Süd": "blue",
269
275
  "Edeka": "yellow",
270
276
  "Lidl": "red",
271
277
  "Netto": "orange",
272
- "Real": "black",
278
+ "Real": "darkblue",
273
279
  "Treff 3000": "fuchsia"
274
280
  }
275
281
  },
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: huff
3
- Version: 1.3.4
3
+ Version: 1.4.0
4
4
  Summary: huff: Huff Model Market Area Analysis
5
5
  Author: Thomas Wieland
6
6
  Author-email: geowieland@googlemail.com
@@ -34,6 +34,8 @@ See the /tests directory for usage examples of most of the included functions.
34
34
  - Defining origins and destinations with weightings
35
35
  - Creating interaction matrix from origins and destinations
36
36
  - Market simulation with basic Huff Model
37
+ - Different function types: power, exponential, logistic
38
+ - Huff model parameter estimation via Maximum Likelihood (ML)
37
39
  - **Multiplicative Competitive Interaction Model**:
38
40
  - Log-centering transformation of interaction matrix
39
41
  - Fitting MCI model with >= 2 independent variables
@@ -60,12 +62,16 @@ See the /tests directory for usage examples of most of the included functions.
60
62
 
61
63
 
62
64
  ## Literature
63
- - Huff DL (1962) *Determination of Intra-Urban Retail Trade Areas*.
65
+ - De Beule M, Van den Poel D, Van de Weghe N (2014) An extended Huff-model for robustly benchmarking and predicting retail network performance. *Applied Geography* 46(1): 80–89. [10.1016/j.apgeog.2013.09.026](https://doi.org/10.1016/j.apgeog.2013.09.026)
66
+ - Haines Jr GH, Simon LS, Alexis M (1972) Maximum Likelihood Estimation of Central-City Food Trading Areas. *Journal of Marketing Research* 9(2): 154-159. [10.2307/3149948](https://doi.org/10.2307/3149948)
67
+ - Huff DL (1962) *Determination of Intra-Urban Retail Trade Areas*. Real Estate Research Program, Graduate Schools of Business Administration, University of California.
68
+ - Huff DL (1963) A Probabilistic Analysis of Shopping Center Trade Areas. *Land Economics* 39(1): 81-90. [10.2307/3144521](https://doi.org/10.2307/3144521)
64
69
  - Huff DL (1964) Defining and estimating a trading area. *Journal of Marketing* 28(4): 34–38. [10.2307/1249154](https://doi.org/10.2307/1249154)
65
70
  - Huff DL, McCallum BM (2008) Calibrating the Huff Model using ArcGIS Business Analyst. ESRI White Paper, September 2008. https://www.esri.com/library/whitepapers/pdfs/calibrating-huff-model.pdf.
66
- - De Beule M, Van den Poel D, Van de Weghe N (2014) An extended Huff-model for robustly benchmarking and predicting retail network performance. *Applied Geography* 46(1): 80–89. [10.1016/j.apgeog.2013.09.026](https://doi.org/10.1016/j.apgeog.2013.09.026)
67
71
  - Nakanishi M, Cooper LG (1974) Parameter estimation for a Multiplicative Competitive Interaction Model: Least squares approach. *Journal of Marketing Research* 11(3): 303–311. [10.2307/3151146](https://doi.org/10.2307/3151146).
68
72
  - Nakanishi M, Cooper LG (1982) Technical Note — Simplified Estimation Procedures for MCI Models. *Marketing Science* 1(3): 314-322. [10.1287/mksc.1.3.314](https://doi.org/10.1287/mksc.1.3.314)
73
+ - Orpana T, Lampinen J (2003) Building Spatial Choice Models from Aggregate Data. *Journal of Regional Science* 43(2): 319-348. [10.1111/1467-9787.00301](https://doi.org/10.1111/1467-9787.00301)
74
+ - Wieland T (2015) *Nahversorgung im Kontext raumökonomischer Entwicklungen im Lebensmitteleinzelhandel: Konzeption und Durchführung einer GIS-gestützten Analyse der Strukturen des Lebensmitteleinzelhandels und der Nahversorgung in Freiburg im Breisgau*. Working paper. Göttingen. https://webdoc.sub.gwdg.de/pub/mon/2015/5-wieland.pdf.
69
75
  - Wieland T (2017) Market Area Analysis for Retail and Service Locations with MCI. *R Journal* 9(1): 298-323. [10.32614/RJ-2017-020](https://doi.org/10.32614/RJ-2017-020)
70
76
  - Wieland T (2018) A Hurdle Model Approach of Store Choice and Market Area Analysis in Grocery Retailing. *Papers in Applied Geography* 4(4): 370-389. [10.1080/23754931.2018.1519458](https://doi.org/10.1080/23754931.2018.1519458)
71
77
  - Wieland T (2023) Spatial shopping behavior during the Corona pandemic: insights from a micro-econometric store choice model for consumer electronics and furniture retailing in Germany. *Journal of Geographical Systems* 25(2): 291–326. [10.1007/s10109-023-00408-x](https://doi.org/10.1007/s10109-023-00408-x)
@@ -1,10 +1,10 @@
1
1
  huff/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
2
- huff/gistools.py,sha256=PVeWvGzTmCXgIgzXGSElvCdaSV78_YcgYISgthyr6sI,6857
3
- huff/models.py,sha256=3jESNI57oS6dEFvf3bFoq4txfP1nkm3aFY4wNrYQMt4,66284
4
- huff/ors.py,sha256=2Fz-_gYmCt4U3Ic0MR7ws7XCPVotVJP3Hh1CkXuZL7g,11929
5
- huff/osm.py,sha256=ARazUc2ciJbXwT6wFiXxR8JaGcYP4cYpb7VXWQR9NRI,6862
2
+ huff/gistools.py,sha256=Bl_5hqjbXwVgUDDXV5Ti-vXhUsKiaMLIFKDDW7CqCd0,7038
3
+ huff/models.py,sha256=PBB58aO-4XIl-QvO-nfw6X8ElXhzUVsiEMG_PYzgI2M,75159
4
+ huff/ors.py,sha256=CATQf0s-6iq1VMnQFMlPA2AjyyYyApfarVXyzOtVNo0,11929
5
+ huff/osm.py,sha256=91spvaFVyKuGLbuRSx6mfi306Lt94J_scD_SQPwrggo,7747
6
6
  huff/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
7
- huff/tests/tests_huff.py,sha256=rXrZ_rQru_JWtBgFPb2KdkS4m9pudgldnQ7r08tTTkw,7795
7
+ huff/tests/tests_huff.py,sha256=AfvCjadGeGp6rHod9bTJQsCYk5K3YfwlVg1s16R_1BA,8149
8
8
  huff/tests/data/Haslach.cpg,sha256=OtMDH1UDpEBK-CUmLugjLMBNTqZoPULF3QovKiesmCQ,5
9
9
  huff/tests/data/Haslach.dbf,sha256=GVPIt05OzDO7UrRDcsMhiYWvyXAPg6Z-qkiysFzj-fc,506
10
10
  huff/tests/data/Haslach.prj,sha256=2Jy1Vlzh7UxQ1MXpZ9UYLs2SxfrObj2xkEkZyLqmGTY,437
@@ -18,7 +18,7 @@ huff/tests/data/Haslach_supermarkets.qmd,sha256=JlcOYzG4vI1NH1IuOpxwIPnJsCyC-pDR
18
18
  huff/tests/data/Haslach_supermarkets.shp,sha256=X7QbQ0BTMag_B-bDRbpr-go2BQIXo3Y8zMAKpYZmlps,324
19
19
  huff/tests/data/Haslach_supermarkets.shx,sha256=j23QHX-SmdAeN04rw0x8nUOran-OCg_T6r_LvzzEPWs,164
20
20
  huff/tests/data/Wieland2015.xlsx,sha256=SaVM-Hi5dBTmf2bzszMnZ2Ec8NUE05S_5F2lQj0ayS0,19641
21
- huff-1.3.4.dist-info/METADATA,sha256=Mq4YbIg7Js2_DK-d9v8nH6Soq_yXXrLmiqZcExfiouo,4558
22
- huff-1.3.4.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
23
- huff-1.3.4.dist-info/top_level.txt,sha256=nlzX-PxZNFmIxANIJMySuIFPihd6qOBkRlhIC28NEsQ,5
24
- huff-1.3.4.dist-info/RECORD,,
21
+ huff-1.4.0.dist-info/METADATA,sha256=OHL8b-FoJaADeRyUJDpQZoWYsm3Avm1bBxYak5rc6rI,5692
22
+ huff-1.4.0.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
23
+ huff-1.4.0.dist-info/top_level.txt,sha256=nlzX-PxZNFmIxANIJMySuIFPihd6qOBkRlhIC28NEsQ,5
24
+ huff-1.4.0.dist-info/RECORD,,
File without changes