huff 1.0.0__py3-none-any.whl → 1.1.1__py3-none-any.whl

This diff shows the changes between package versions that have been publicly released to one of the supported registries. It is provided for informational purposes only and reflects the package contents exactly as they appear in their respective public registries.
huff/gistools.py CHANGED
@@ -4,8 +4,8 @@
4
4
  # Author: Thomas Wieland
5
5
  # ORCID: 0000-0001-5168-9846
6
6
  # mail: geowieland@googlemail.com
7
- # Version: 1.0.0
8
- # Last update: 2025-04-25 18:16
7
+ # Version: 1.1.1
8
+ # Last update: 2025-04-29 18:12
9
9
  # Copyright (c) 2025 Thomas Wieland
10
10
  #-----------------------------------------------------------------------
11
11
 
huff/models.py CHANGED
@@ -4,8 +4,8 @@
4
4
  # Author: Thomas Wieland
5
5
  # ORCID: 0000-0001-5168-9846
6
6
  # mail: geowieland@googlemail.com
7
- # Version: 1.0.0
8
- # Last update: 2025-04-25 18:13
7
+ # Version: 1.1.1
8
+ # Last update: 2025-04-29 18:12
9
9
  # Copyright (c) 2025 Thomas Wieland
10
10
  #-----------------------------------------------------------------------
11
11
 
@@ -13,7 +13,9 @@
13
13
  import pandas as pd
14
14
  import geopandas as gp
15
15
  import numpy as np
16
- from .ors import matrix
16
+ import time
17
+ from huff.ors import Client, TimeDistanceMatrix, Isochrone
18
+ from huff.gistools import overlay_difference
17
19
 
18
20
 
19
21
  class CustomerOrigins:
@@ -198,28 +200,142 @@ class SupplyLocations:
198
200
  self.metadata = metadata
199
201
 
200
202
  return self
203
+
204
+ def add_new_destinations(
205
+ self,
206
+ new_destinations,
207
+ ):
208
+
209
+ geodata_gpd_original = self.get_geodata_gpd_original()
210
+ geodata_gpd = self.get_geodata_gpd()
211
+ metadata = self.get_metadata()
212
+
213
+ new_destinations_gpd_original = new_destinations.get_geodata_gpd_original()
214
+ new_destinations_gpd = new_destinations.get_geodata_gpd()
215
+ new_destinations_metadata = new_destinations.get_metadata()
216
+
217
+ if list(new_destinations_gpd_original.columns) != list(geodata_gpd_original.columns):
218
+ raise KeyError("Supply locations and new destinations data have different column names.")
219
+ if list(new_destinations_gpd.columns) != list(geodata_gpd.columns):
220
+ raise KeyError("Supply locations and new destinations data have different column names.")
221
+
222
+ geodata_gpd_original = geodata_gpd_original.append(
223
+ new_destinations_gpd_original,
224
+ ignore_index=True
225
+ )
226
+
227
+ geodata_gpd = geodata_gpd.append(
228
+ new_destinations_gpd,
229
+ ignore_index=True
230
+ )
231
+
232
+ metadata["no_points"] = metadata["no_points"]+new_destinations_metadata["no_points"]
233
+
234
+ self.geodata_gpd = geodata_gpd
235
+ self.geodata_gpd_original = geodata_gpd_original
236
+ self.metadata = metadata
237
+
238
+ return self
239
+
240
+ def isochrones(
241
+ self,
242
+ segments: list = [900, 600, 300],
243
+ range_type: str = "time",
244
+ intersections: str = "true",
245
+ profile: str = "driving-car",
246
+ donut: bool = True,
247
+ ors_server: str = "https://api.openrouteservice.org/v2/",
248
+ ors_auth: str = None,
249
+ timeout = 10,
250
+ delay = 1,
251
+ save_output: bool = True,
252
+ output_filepath: str = "isochrones.shp",
253
+ output_crs: str = "EPSG:4326"
254
+ ):
255
+
256
+ geodata_gpd = self.get_geodata_gpd()
257
+ metadata = self.get_metadata()
258
+
259
+ coords = [(point.x, point.y) for point in geodata_gpd.geometry]
260
+
261
+ unique_id_col = metadata["unique_id"]
262
+ unique_id_values = geodata_gpd[unique_id_col].values
263
+
264
+ ors_client = Client(
265
+ server = ors_server,
266
+ auth = ors_auth
267
+ )
268
+
269
+ isochrones_gdf = gp.GeoDataFrame(columns=[unique_id_col, "geometry"])
270
+
271
+ i = 0
272
+
273
+ for x, y in coords:
274
+
275
+ isochrone_output = ors_client.isochrone(
276
+ locations = [[x, y]],
277
+ segments = segments,
278
+ range_type = range_type,
279
+ intersections = intersections,
280
+ profile = profile,
281
+ timeout = timeout,
282
+ save_output = False,
283
+ output_crs = output_crs
284
+ )
285
+
286
+ if isochrone_output.status_code != 200:
287
+ continue
288
+
289
+ isochrone_gdf = isochrone_output.get_isochrones_gdf()
290
+
291
+ if donut:
292
+ isochrone_gdf = overlay_difference(
293
+ polygon_gdf = isochrone_gdf,
294
+ sort_col = "segment"
295
+ )
296
+
297
+ time.sleep(delay)
298
+
299
+ isochrone_gdf[unique_id_col] = unique_id_values[i]
300
+
301
+ isochrones_gdf = pd.concat(
302
+ [
303
+ isochrones_gdf,
304
+ isochrone_gdf
305
+ ],
306
+ ignore_index=True
307
+ )
308
+
309
+ i = i+1
310
+
311
+ isochrones_gdf.set_crs(
312
+ output_crs,
313
+ allow_override=True,
314
+ inplace=True
315
+ )
316
+
317
+ if save_output:
318
+
319
+ isochrones_gdf.to_file(filename = output_filepath)
320
+
321
+ return isochrones_gdf
201
322
 
202
323
  class InteractionMatrix:
203
324
 
204
325
  def __init__(
205
326
  self,
206
327
  interaction_matrix_df,
207
- market_areas_df,
208
328
  customer_origins,
209
329
  supply_locations
210
330
  ):
211
331
 
212
332
  self.interaction_matrix_df = interaction_matrix_df
213
- self.market_areas_df = market_areas_df
214
333
  self.customer_origins = customer_origins
215
334
  self.supply_locations = supply_locations
216
335
 
217
336
  def get_interaction_matrix_df(self):
218
337
  return self.interaction_matrix_df
219
338
 
220
- def get_market_areas_df(self):
221
- return self.market_areas_df
222
-
223
339
  def get_customer_origins(self):
224
340
  return self.customer_origins
225
341
 
@@ -231,7 +347,7 @@ class InteractionMatrix:
231
347
  customer_origins_metadata = self.get_customer_origins().get_metadata()
232
348
  supply_locations_metadata = self.get_supply_locations().get_metadata()
233
349
 
234
- print("Huff Model")
350
+ print("Huff Model Interaction Matrix")
235
351
  print("----------------------------------")
236
352
  print("Supply locations " + str(supply_locations_metadata["no_points"]))
237
353
  if supply_locations_metadata["attraction_col"][0] is None:
@@ -259,6 +375,7 @@ class InteractionMatrix:
259
375
  self,
260
376
  range_type: str = "time",
261
377
  time_unit: str = "minutes",
378
+ ors_server: str = "https://api.openrouteservice.org/v2/",
262
379
  ors_auth: str = None,
263
380
  save_output: bool = False,
264
381
  output_filepath: str = "transport_costs_matrix.csv"
@@ -285,8 +402,11 @@ class InteractionMatrix:
285
402
  customer_origins_index = list(range(len(customer_origins_coords)))
286
403
  locations_coords_index = list(range(len(customer_origins_index), len(locations_coords)))
287
404
 
288
- time_distance_matrix = matrix(
289
- auth = ors_auth,
405
+ ors_client = Client(
406
+ server = ors_server,
407
+ auth = ors_auth
408
+ )
409
+ time_distance_matrix = ors_client.matrix(
290
410
  locations = locations_coords,
291
411
  save_output = save_output,
292
412
  output_filepath = output_filepath,
@@ -295,6 +415,9 @@ class InteractionMatrix:
295
415
  range_type = range_type
296
416
  )
297
417
 
418
+ if time_distance_matrix.get_metadata() is None:
419
+ raise ValueError ("No transport costs matrix was built.")
420
+
298
421
  transport_costs_matrix = time_distance_matrix.get_matrix()
299
422
  transport_costs_matrix_config = time_distance_matrix.get_config()
300
423
  range_type = transport_costs_matrix_config["range_type"]
@@ -339,6 +462,11 @@ class InteractionMatrix:
339
462
  if interaction_matrix_df["A_j"].isna().all():
340
463
  raise ValueError ("Attraction variable is not defined")
341
464
 
465
+ check_vars(
466
+ df = interaction_matrix_df,
467
+ cols = ["A_j", "t_ij"]
468
+ )
469
+
342
470
  customer_origins = self.customer_origins
343
471
  customer_origins_metadata = customer_origins.get_metadata()
344
472
  tc_weighting = customer_origins_metadata["weighting"][0]
@@ -400,6 +528,11 @@ class InteractionMatrix:
400
528
  if interaction_matrix_df["C_i"].isna().all():
401
529
  raise ValueError ("Market size column in customer origins not defined. Use CustomerOrigins.define_marketsize()")
402
530
 
531
+ check_vars(
532
+ df = interaction_matrix_df,
533
+ cols = ["C_i"]
534
+ )
535
+
403
536
  if interaction_matrix_df["p_ij"].isna().all():
404
537
  self.probabilities()
405
538
  interaction_matrix_df = self.interaction_matrix_df
@@ -413,17 +546,110 @@ class InteractionMatrix:
413
546
  def marketareas (self):
414
547
 
415
548
  interaction_matrix_df = self.interaction_matrix_df
416
-
549
+
550
+ check_vars(
551
+ df = interaction_matrix_df,
552
+ cols = ["E_ij"]
553
+ )
554
+
417
555
  market_areas_df = pd.DataFrame(interaction_matrix_df.groupby("j")["E_ij"].sum())
418
556
  market_areas_df = market_areas_df.reset_index(drop=False)
419
557
  market_areas_df = market_areas_df.rename(columns={"E_ij": "T_j"})
420
558
 
421
- print(market_areas_df)
559
+ huff_model = HuffModel(
560
+ self,
561
+ market_areas_df
562
+ )
563
+
564
+ return huff_model
565
+
566
+ def mci_transformation(
567
+ self,
568
+ cols: list = ["A_j", "t_ij"]
569
+ ):
422
570
 
423
- self.market_areas_df = market_areas_df
571
+ """ MCI model log-centering transformation """
572
+
573
+ cols = cols + ["p_ij"]
574
+
575
+ interaction_matrix_df = self.interaction_matrix_df
576
+
577
+ interaction_matrix_df = mci_transformation(
578
+ df = interaction_matrix_df,
579
+ ref_col = "i",
580
+ cols = cols
581
+ )
582
+
583
+ self.interaction_matrix_df = interaction_matrix_df
424
584
 
425
585
  return self
426
586
 
587
+ class HuffModel:
588
+
589
+ def __init__(
590
+ self,
591
+ interaction_matrix,
592
+ market_areas_df
593
+ ):
594
+
595
+ self.interaction_matrix = interaction_matrix
596
+ self.market_areas_df = market_areas_df
597
+
598
+ def get_interaction_matrix_df(self):
599
+
600
+ interaction_matrix = self.interaction_matrix
601
+ interaction_matrix_df = interaction_matrix.get_interaction_matrix_df()
602
+
603
+ return interaction_matrix_df
604
+
605
+ def get_supply_locations(self):
606
+
607
+ interaction_matrix = self.interaction_matrix
608
+ supply_locations = interaction_matrix.get_supply_locations()
609
+
610
+ return supply_locations
611
+
612
+ def get_customer_origins(self):
613
+
614
+ interaction_matrix = self.interaction_matrix
615
+ customer_origins = interaction_matrix.get_customer_origins()
616
+
617
+ return customer_origins
618
+
619
+ def get_market_areas_df(self):
620
+ return self.market_areas_df
621
+
622
+ def summary(self):
623
+
624
+ interaction_matrix = self.interaction_matrix
625
+
626
+ customer_origins_metadata = interaction_matrix.get_customer_origins().get_metadata()
627
+ supply_locations_metadata = interaction_matrix.get_supply_locations().get_metadata()
628
+
629
+ print("Huff Model")
630
+ print("----------------------------------")
631
+ print("Supply locations " + str(supply_locations_metadata["no_points"]))
632
+ if supply_locations_metadata["attraction_col"][0] is None:
633
+ print("Attraction column not defined")
634
+ else:
635
+ print("Attraction column " + supply_locations_metadata["attraction_col"][0])
636
+ print("Customer origins " + str(customer_origins_metadata["no_points"]))
637
+ if customer_origins_metadata["marketsize_col"] is None:
638
+ print("Market size column not defined")
639
+ else:
640
+ print("Market size column " + customer_origins_metadata["marketsize_col"])
641
+ print("----------------------------------")
642
+ print("Weights")
643
+ if supply_locations_metadata["weighting"][0]["func"] is None and supply_locations_metadata["weighting"][0]["param"] is None:
644
+ print("Gamma not defined")
645
+ else:
646
+ print("Gamma " + str(supply_locations_metadata["weighting"][0]["param"]) + " (" + supply_locations_metadata["weighting"][0]["func"] + ")")
647
+ if customer_origins_metadata["weighting"][0]["func"] is None and customer_origins_metadata["weighting"][0]["param"] is None:
648
+ print("Lambda not defined")
649
+ else:
650
+ print("Lambda " + str(customer_origins_metadata["weighting"][0]["param"]) + " (" + customer_origins_metadata["weighting"][0]["func"] + ")")
651
+ print("----------------------------------")
652
+
427
653
  def load_geodata (
428
654
  file,
429
655
  location_type: str,
@@ -438,7 +664,7 @@ def load_geodata (
438
664
  ):
439
665
 
440
666
  if location_type is None or (location_type != "origins" and location_type != "destinations"):
441
- raise ValueError ("location_type must be either 'loc' or 'dest'")
667
+ raise ValueError ("location_type must be either 'origins' or 'destinations'")
442
668
 
443
669
  if data_type not in ["shp", "csv", "xlsx"]:
444
670
  raise ValueError ("data_type must be 'shp', 'csv' or 'xlsx'")
@@ -517,7 +743,7 @@ def create_interaction_matrix(
517
743
  if not isinstance(customer_origins, CustomerOrigins):
518
744
  raise ValueError ("customer_origins must be of class CustomerOrigins")
519
745
  if not isinstance(supply_locations, SupplyLocations):
520
- raise ValueError ("supply_locations must be of class supply_locations")
746
+ raise ValueError ("supply_locations must be of class SupplyLocations")
521
747
 
522
748
  customer_origins_metadata = customer_origins.get_metadata()
523
749
  if customer_origins_metadata["marketsize_col"] is None:
@@ -573,13 +799,53 @@ def create_interaction_matrix(
573
799
  interaction_matrix_df["p_ij"] = None
574
800
  interaction_matrix_df["E_ij"] = None
575
801
 
576
- market_areas_df = None
577
-
578
802
  interaction_matrix = InteractionMatrix(
579
803
  interaction_matrix_df,
580
- market_areas_df,
581
804
  customer_origins,
582
805
  supply_locations
583
806
  )
584
807
 
585
- return interaction_matrix
808
+ return interaction_matrix
809
+
810
+ def check_vars(
811
+ df: pd.DataFrame,
812
+ cols: list
813
+ ):
814
+
815
+ for col in cols:
816
+ if col not in df.columns:
817
+ raise KeyError(f"Column '{col}' not in dataframe.")
818
+
819
+ for col in cols:
820
+ if not pd.api.types.is_numeric_dtype(df[col]):
821
+ raise ValueError(f"Column '{col}' is not numeric. All columns must be numeric.")
822
+
823
+ for col in cols:
824
+ if (df[col] <= 0).any():
825
+ raise ValueError(f"Column '{col}' includes values <= 0. All values must be numeric and positive.")
826
+
827
+ def mci_transformation(
828
+ df: pd.DataFrame,
829
+ ref_col: str,
830
+ cols: list
831
+ ):
832
+
833
+ check_vars(
834
+ df = df,
835
+ cols = cols + [ref_col]
836
+ )
837
+
838
+ def lct (x):
839
+
840
+ x_geom = np.exp(np.log(x).mean())
841
+ x_lct = np.log(x/x_geom)
842
+
843
+ return x_lct
844
+
845
+ for var in cols:
846
+
847
+ var_t = df.groupby(ref_col)[var].apply(lct)
848
+ var_t = var_t.reset_index()
849
+ df[var+"_t"] = var_t[var]
850
+
851
+ return df
huff/ors.py CHANGED
@@ -4,8 +4,8 @@
4
4
  # Author: Thomas Wieland
5
5
  # ORCID: 0000-0001-5168-9846
6
6
  # mail: geowieland@googlemail.com
7
- # Version: 1.0.0
8
- # Last update: 2025-04-25 18:14
7
+ # Version: 1.1.1
8
+ # Last update: 2025-04-29 18:12
9
9
  # Copyright (c) 2025 Thomas Wieland
10
10
  #-----------------------------------------------------------------------
11
11
 
@@ -32,22 +32,31 @@ class Isochrone:
32
32
  self.status_code = status_code
33
33
  self.save_config = save_config
34
34
 
35
+ def get_isochrones_gdf(self):
36
+
37
+ isochrones_gdf = self.isochrones_gdf
38
+ return isochrones_gdf
39
+
35
40
  def summary(self):
36
41
 
37
42
  metadata = self.metadata
38
43
  status_code = self.status_code
39
44
 
40
- range_str = [str(range) for range in metadata["query"]["range"]]
41
- profile = metadata["query"]["profile"]
42
- range_type = metadata["query"]["range_type"]
43
- no_locations = len(metadata["query"]["locations"]) #[str(seg) for seg in metadata["query"]["locations"]]
44
-
45
- print("Locations: " + str(no_locations))
46
- print("Segments: " + ", ".join(range_str))
47
- print("Range type: " + range_type)
48
- print("Profile: " + profile)
49
- print("Status code: " + str(status_code))
45
+ if metadata is not None:
46
+ range_str = [str(range) for range in metadata["query"]["range"]]
47
+ profile = metadata["query"]["profile"]
48
+ range_type = metadata["query"]["range_type"]
49
+ no_locations = len(metadata["query"]["locations"])
50
50
 
51
+ print("Locations " + str(no_locations))
52
+ print("Segments " + ", ".join(range_str))
53
+ print("Range type " + range_type)
54
+ print("Profile " + profile)
55
+
56
+ else:
57
+ print("No isochrones were built.")
58
+
59
+ print("Status code " + str(status_code))
51
60
 
52
61
  class TimeDistanceMatrix:
53
62
 
@@ -81,254 +90,294 @@ class TimeDistanceMatrix:
81
90
  metadata = self.metadata
82
91
  status_code = self.status_code
83
92
 
84
- pass # TODO ??
85
-
86
-
87
- def isochrone(
88
- auth: str,
89
- locations: list,
90
- id: list = [],
91
- segments: list = [900, 600, 300],
92
- range_type: str = "time",
93
- intersections: str = "true",
94
- profile: str = "driving-car",
95
- save_output: bool = True,
96
- output_filepath: str = "isochrones.shp",
97
- output_crs: str = "EPSG:2056",
98
- verbose: bool = True
99
- ):
100
-
101
- ors_url = "https://api.openrouteservice.org/v2/isochrones/" + profile
102
-
103
- headers = {
104
- "Content-Type": "application/json; charset=utf-8",
105
- "Accept": "application/json, application/geo+json, application/gpx+xml, img/png; charset=utf-8",
106
- "Authorization": auth
107
- }
108
-
109
- body = {
110
- "locations": locations,
111
- "range": segments,
112
- "intersections": intersections,
113
- "range_type": range_type
114
- }
115
-
116
- try:
117
-
118
- response = requests.post(
119
- ors_url,
120
- headers=headers,
121
- json=body
122
- )
93
+ config = self.save_config
94
+
95
+ if metadata is not None:
96
+
97
+ profile = metadata["query"]["profile"]
98
+ no_locations = len(metadata["query"]["locations"])
99
+ range_type = config["range_type"]
100
+
101
+ print("Locations " + str(no_locations))
102
+ print("Range type " + range_type)
103
+ print("Profile " + profile)
104
+ else:
105
+ print("No time/distance matrix was built.")
106
+ print("Status code " + str(status_code))
107
+
108
+ class Client:
109
+
110
+ def __init__(
111
+ self,
112
+ server = "https://api.openrouteservice.org/v2/",
113
+ auth: str = None
114
+ ):
123
115
 
124
- except:
116
+ self.server = server
117
+ self.auth = auth
118
+
119
+ def isochrone(
120
+ self,
121
+ locations: list,
122
+ id: list = [],
123
+ segments: list = [900, 600, 300],
124
+ range_type: str = "time",
125
+ intersections: str = "true",
126
+ profile: str = "driving-car",
127
+ timeout = 10,
128
+ save_output: bool = True,
129
+ output_filepath: str = "isochrones.shp",
130
+ output_crs: str = "EPSG:4326"
131
+ ):
132
+
133
+ ors_url = self.server + "isochrones/" + profile
134
+ auth = self.auth
135
+
136
+ headers = {
137
+ "Content-Type": "application/json; charset=utf-8",
138
+ "Accept": "application/json, application/geo+json, application/gpx+xml, img/png; charset=utf-8",
139
+ "Authorization": auth
140
+ }
141
+
142
+ body = {
143
+ "locations": locations,
144
+ "range": segments,
145
+ "intersections": intersections,
146
+ "range_type": range_type
147
+ }
148
+
149
+ save_config = {
150
+ "range_type": range_type,
151
+ "save_output": save_output,
152
+ "output_filepath" : output_filepath,
153
+ "output_crs": output_crs
154
+ }
155
+
156
+ try:
157
+
158
+ response = requests.post(
159
+ ors_url,
160
+ headers=headers,
161
+ json=body,
162
+ timeout=timeout
163
+ )
164
+
165
+ except:
125
166
 
126
- if verbose is True:
127
167
  print ("Unknown error while accessing ORS server")
128
-
129
- status_code = 99999
130
- isochrones_gdf = None
131
- metadata = None
168
+
169
+ status_code = 99999
170
+ isochrones_gdf = None
171
+ metadata = None
172
+
173
+ isochrone_output = Isochrone(
174
+ isochrones_gdf,
175
+ metadata,
176
+ status_code,
177
+ save_config
178
+ )
179
+
180
+ return isochrone_output
132
181
 
133
- status_code = response.status_code
182
+ status_code = response.status_code
134
183
 
135
- if status_code == 200:
184
+ if status_code == 200:
136
185
 
137
- if verbose is True:
138
186
  print ("Accessing ORS server successful")
139
187
 
140
- response_json = response.json()
141
-
142
- metadata = response_json["metadata"]
143
-
144
- features = response_json["features"]
145
- geometries = [shape(feature["geometry"]) for feature in features]
188
+ response_json = response.json()
189
+
190
+ metadata = response_json["metadata"]
191
+
192
+ features = response_json["features"]
193
+ geometries = [shape(feature["geometry"]) for feature in features]
146
194
 
147
- isochrones_gdf = gp.GeoDataFrame(
148
- features,
149
- geometry=geometries,
150
- crs="EPSG:4326"
151
- )
195
+ isochrones_gdf = gp.GeoDataFrame(
196
+ features,
197
+ geometry=geometries,
198
+ crs="EPSG:4326"
199
+ )
152
200
 
153
- isochrones_gdf["segment"] = 0
154
- isochrones_gdf_properties_dict = dict(isochrones_gdf["properties"])
201
+ isochrones_gdf["segment"] = 0
202
+ isochrones_gdf_properties_dict = dict(isochrones_gdf["properties"])
155
203
 
156
- for i in range(len(isochrones_gdf_properties_dict)):
157
- isochrones_gdf.iloc[i,3] = isochrones_gdf_properties_dict[i]["value"]
204
+ for i in range(len(isochrones_gdf_properties_dict)):
205
+ isochrones_gdf.iloc[i,3] = isochrones_gdf_properties_dict[i]["value"]
158
206
 
159
- isochrones_gdf = isochrones_gdf.drop(columns=["properties"])
160
- isochrones_gdf = isochrones_gdf.to_crs(output_crs)
207
+ isochrones_gdf = isochrones_gdf.drop(columns=["properties"])
208
+ isochrones_gdf = isochrones_gdf.to_crs(output_crs)
161
209
 
162
- if save_output:
163
- isochrones_gdf.to_file(output_filepath)
164
- print ("Saved as", output_filepath)
210
+ if save_output:
211
+ isochrones_gdf.to_file(output_filepath)
212
+ print ("Saved as", output_filepath)
165
213
 
166
- else:
214
+ else:
167
215
 
168
- if verbose is True:
169
216
  print ("Error while accessing ORS server. Status Code: " + str(status_code))
170
217
 
171
- isochrones_gdf = None
172
- metadata = None
173
-
174
- save_config = {
175
- "range_type": range_type,
176
- "save_output": save_output,
177
- "output_filepath" : output_filepath,
178
- "output_crs": output_crs
179
- }
180
-
181
- isochrone_output = Isochrone(
182
- isochrones_gdf,
183
- metadata,
184
- status_code,
185
- save_config
186
- )
187
-
188
- return isochrone_output
189
-
190
- def matrix(
191
- auth,
192
- locations: list,
193
- sources: list = [],
194
- destinations: list = [],
195
- id: str = None,
196
- range_type = "time",
197
- metrics: list = [],
198
- resolve_locations: bool = False,
199
- units: str = "mi",
200
- save_output = False,
201
- output_filepath = "matrix.csv",
202
- csv_sep = ";",
203
- csv_decimal = ",",
204
- csv_encoding = None,
205
- verbose = True
206
- ):
207
-
208
- ors_url = "https://api.openrouteservice.org/v2/matrix/driving-car"
209
-
210
- headers = {
211
- "Content-Type": "application/json; charset=utf-8",
212
- "Accept": "application/json, application/geo+json, application/gpx+xml, img/png; charset=utf-8",
213
- "Authorization": auth
214
- }
215
-
216
- body = {
217
- "locations": locations,
218
- "resolve_locations": resolve_locations
219
- }
220
- if id is not None:
221
- body["id"] = id
222
- if metrics != []:
223
- body["metrics"] = metrics
224
- if sources != []:
225
- body["sources"] = sources
226
- if destinations != []:
227
- body["destinations"] = destinations
228
- if units is not None:
229
- body["units"] = units
230
-
231
- try:
232
-
233
- response = requests.post(
234
- ors_url,
235
- headers=headers,
236
- json=body
237
- )
238
-
239
- except:
218
+ isochrones_gdf = None
219
+ metadata = None
240
220
 
241
- if verbose is True:
242
- print ("Unknown error while accessing ORS server")
243
-
244
- status_code = 99999
245
- matrix_df = None
246
- metadata = None
221
+ isochrone_output = Isochrone(
222
+ isochrones_gdf,
223
+ metadata,
224
+ status_code,
225
+ save_config
226
+ )
247
227
 
248
- status_code = response.status_code
228
+ return isochrone_output
229
+
230
+ def matrix(
231
+ self,
232
+ locations: list,
233
+ sources: list = [],
234
+ destinations: list = [],
235
+ id: str = None,
236
+ range_type = "time",
237
+ profile = "driving-car",
238
+ metrics: list = [],
239
+ resolve_locations: bool = False,
240
+ units: str = "mi",
241
+ timeout = 10,
242
+ save_output = False,
243
+ output_filepath = "matrix.csv",
244
+ csv_sep = ";",
245
+ csv_decimal = ",",
246
+ csv_encoding = None
247
+ ):
249
248
 
250
- if status_code == 200:
249
+ ors_url = self.server + "matrix/" + profile
250
+ auth = self.auth
251
251
 
252
- if verbose is True:
253
- print ("Accessing ORS server successful")
252
+ headers = {
253
+ "Content-Type": "application/json; charset=utf-8",
254
+ "Accept": "application/json, application/geo+json, application/gpx+xml, img/png; charset=utf-8",
255
+ "Authorization": auth
256
+ }
257
+
258
+ body = {
259
+ "locations": locations,
260
+ "resolve_locations": resolve_locations
261
+ }
262
+ if id is not None:
263
+ body["id"] = id
264
+ if metrics != []:
265
+ body["metrics"] = metrics
266
+ if sources != []:
267
+ body["sources"] = sources
268
+ if destinations != []:
269
+ body["destinations"] = destinations
270
+ if units is not None:
271
+ body["units"] = units
272
+
273
+ save_config = {
274
+ "range_type": range_type,
275
+ "save_output": save_output,
276
+ "output_filepath": output_filepath
277
+ }
254
278
 
255
- response_json = response.json()
256
-
257
- metadata = response_json["metadata"]
258
-
259
- matrix_df = pd.DataFrame(
260
- columns=[
261
- "source",
262
- "source_lat",
263
- "source_lon",
264
- "source_snapped_distance",
265
- "destination",
266
- "destination_lat",
267
- "destination_lon",
268
- "destination_snapped_distance",
269
- "source_destination",
270
- range_type
271
- ])
272
-
273
- for i, value in enumerate(response_json["durations"]):
274
-
275
- source_lat = response_json["sources"][i]["location"][1]
276
- source_lon = response_json["sources"][i]["location"][0]
277
- source_snapped_distance = response_json["sources"][i]["snapped_distance"]
279
+ try:
280
+
281
+ response = requests.post(
282
+ ors_url,
283
+ headers=headers,
284
+ json=body,
285
+ timeout=timeout
286
+ )
287
+
288
+ except:
289
+
290
+ print ("Unknown error while accessing ORS server")
278
291
 
279
- for j, entry in enumerate(value):
280
-
281
- destination_lat = response_json["destinations"][j]["location"][1]
282
- destination_lon = response_json["destinations"][j]["location"][0]
283
- destination_snapped_distance = response_json["destinations"][j]["snapped_distance"]
284
-
285
- matrix_row = pd.Series(
286
- {
287
- "source": str(i),
288
- "source_lat": source_lat,
289
- "source_lon": source_lon,
290
- "source_snapped_distance": source_snapped_distance,
291
- "destination": str(j),
292
- "destination_lat": destination_lat,
293
- "destination_lon": destination_lon,
294
- "destination_snapped_distance": destination_snapped_distance,
295
- "source_destination": str(i)+"_"+str(j),
296
- range_type: entry
297
- }
298
- )
299
-
300
- matrix_df = pd.concat([
301
- matrix_df,
302
- pd.DataFrame([matrix_row])])
303
-
304
- if save_output:
305
- matrix_df.to_csv(
306
- output_filepath,
307
- decimal = csv_decimal,
308
- sep = csv_sep,
309
- encoding = csv_encoding
292
+ status_code = 99999
293
+ matrix_df = None
294
+ metadata = None
295
+
296
+ matrix_output = TimeDistanceMatrix(
297
+ matrix_df,
298
+ metadata,
299
+ status_code,
300
+ save_config
310
301
  )
311
- print ("Saved as", output_filepath)
312
302
 
313
- else:
303
+ return matrix_output
314
304
 
315
- if verbose is True:
316
- print ("Error in accessing ORS server. Status Code: " + str(status_code))
305
+ status_code = response.status_code
317
306
 
318
- matrix_df = None
319
- metadata = None
307
+ if status_code == 200:
320
308
 
321
- save_config = {
322
- "range_type": range_type,
323
- "save_output": save_output,
324
- "output_filepath": output_filepath
325
- }
309
+ print ("Accessing ORS server successful")
326
310
 
327
- matrix_output = TimeDistanceMatrix(
328
- matrix_df,
329
- metadata,
330
- status_code,
331
- save_config
332
- )
333
-
334
- return matrix_output
311
+ response_json = response.json()
312
+
313
+ metadata = response_json["metadata"]
314
+
315
+ matrix_df = pd.DataFrame(
316
+ columns=[
317
+ "source",
318
+ "source_lat",
319
+ "source_lon",
320
+ "source_snapped_distance",
321
+ "destination",
322
+ "destination_lat",
323
+ "destination_lon",
324
+ "destination_snapped_distance",
325
+ "source_destination",
326
+ range_type
327
+ ])
328
+
329
+ for i, value in enumerate(response_json["durations"]):
330
+
331
+ source_lat = response_json["sources"][i]["location"][1]
332
+ source_lon = response_json["sources"][i]["location"][0]
333
+ source_snapped_distance = response_json["sources"][i]["snapped_distance"]
334
+
335
+ for j, entry in enumerate(value):
336
+
337
+ destination_lat = response_json["destinations"][j]["location"][1]
338
+ destination_lon = response_json["destinations"][j]["location"][0]
339
+ destination_snapped_distance = response_json["destinations"][j]["snapped_distance"]
340
+
341
+ matrix_row = pd.Series(
342
+ {
343
+ "source": str(i),
344
+ "source_lat": source_lat,
345
+ "source_lon": source_lon,
346
+ "source_snapped_distance": source_snapped_distance,
347
+ "destination": str(j),
348
+ "destination_lat": destination_lat,
349
+ "destination_lon": destination_lon,
350
+ "destination_snapped_distance": destination_snapped_distance,
351
+ "source_destination": str(i)+"_"+str(j),
352
+ range_type: entry
353
+ }
354
+ )
355
+
356
+ matrix_df = pd.concat([
357
+ matrix_df,
358
+ pd.DataFrame([matrix_row])])
359
+
360
+ if save_output:
361
+ matrix_df.to_csv(
362
+ output_filepath,
363
+ decimal = csv_decimal,
364
+ sep = csv_sep,
365
+ encoding = csv_encoding
366
+ )
367
+ print ("Saved as", output_filepath)
368
+
369
+ else:
370
+
371
+ print ("Error in accessing ORS server. Status Code: " + str(status_code))
372
+
373
+ matrix_df = None
374
+ metadata = None
375
+
376
+ matrix_output = TimeDistanceMatrix(
377
+ matrix_df,
378
+ metadata,
379
+ status_code,
380
+ save_config
381
+ )
382
+
383
+ return matrix_output
huff/tests/tests_huff.py CHANGED
@@ -4,25 +4,28 @@
4
4
  # Author: Thomas Wieland
5
5
  # ORCID: 0000-0001-5168-9846
6
6
  # mail: geowieland@googlemail.com
7
- # Version: 1.0.0
8
- # Last update: 2025-04-25 18:08
7
+ # Version: 1.1.1
8
+ # Last update: 2025-04-29 18:12
9
9
  # Copyright (c) 2025 Thomas Wieland
10
10
  #-----------------------------------------------------------------------
11
11
 
12
12
 
13
- from ..ors import isochrone, matrix
14
- from ..models import load_geodata, create_interaction_matrix
13
+ from huff.ors import Client
14
+ from huff.models import load_geodata, create_interaction_matrix
15
15
 
16
16
 
17
17
  # Isochrones test:
18
18
 
19
19
  output_path = "."
20
20
 
21
- isochrone_ORS = isochrone (
22
- auth = "5b3ce3597851110001cf62480a15aafdb5a64f4d91805929f8af6abd",
23
- locations =[[7.593301534652711, 47.54329763735186], [9.207916,49.153868]],
21
+ ors_client = Client(
22
+ auth = "5b3ce3597851110001cf62480a15aafdb5a64f4d91805929f8af6abd"
23
+ )
24
+
25
+ isochrone_ORS = ors_client.isochrone (
26
+ locations = [[7.593301534652711, 47.54329763735186], [9.207916,49.153868]],
24
27
  save_output = True,
25
- output_filepath = "test.shp",
28
+ output_filepath = "test_isochrones.shp",
26
29
  intersections="false"
27
30
  )
28
31
 
@@ -30,19 +33,19 @@ isochrone_ORS.summary()
30
33
 
31
34
  # Matrix test:
32
35
 
33
- matrix_ORS = matrix(auth="5b3ce3597851110001cf62480a15aafdb5a64f4d91805929f8af6abd",
36
+ matrix_ORS = ors_client.matrix(
34
37
  locations=[[9.70093,48.477473],[9.207916,49.153868],[37.573242,55.801281],[115.663757,38.106467]],
35
38
  save_output=True,
36
- output_filepath="testmatrix.csv"
39
+ output_filepath="test_matrix.csv"
37
40
  )
38
41
 
39
- print(matrix_ORS)
42
+ matrix_ORS.summary()
40
43
 
41
44
 
42
45
  # Huff model test data:
43
46
 
44
47
  Haslach = load_geodata(
45
- "huff/tests/data/Haslach.shp",
48
+ "data/Haslach.shp",
46
49
  location_type="origins",
47
50
  unique_id="BEZEICHN"
48
51
  )
@@ -51,22 +54,32 @@ Haslach.summary()
51
54
 
52
55
  Haslach.define_marketsize("pop")
53
56
 
54
- Haslach.define_transportcosts_weighting()
57
+ Haslach.define_transportcosts_weighting(
58
+ param_lambda=-2.2
59
+ )
55
60
 
56
61
  Haslach.summary()
57
62
 
58
63
 
59
64
  Haslach_supermarkets = load_geodata(
60
- "huff/tests/data/Haslach_supermarkets.shp",
65
+ "data/Haslach_supermarkets.shp",
61
66
  location_type="destinations",
62
67
  unique_id="LFDNR"
63
68
  )
64
69
 
65
70
  Haslach_supermarkets.summary()
66
71
 
72
+ Haslach_supermarkets.isochrones(
73
+ save_output=True,
74
+ ors_auth = "5b3ce3597851110001cf62480a15aafdb5a64f4d91805929f8af6abd",
75
+ output_filepath="Haslach_supermarkets_iso.shp"
76
+ )
77
+
67
78
  Haslach_supermarkets.define_attraction("VKF_qm")
68
79
 
69
- Haslach_supermarkets.define_attraction_weighting()
80
+ Haslach_supermarkets.define_attraction_weighting(
81
+ param_gamma=0.9
82
+ )
70
83
 
71
84
  Haslach_supermarkets.summary()
72
85
 
@@ -82,8 +95,8 @@ interaction_matrix = haslach_interactionmatrix.transport_costs(
82
95
 
83
96
  interaction_matrix = interaction_matrix.flows()
84
97
 
85
- interaction_matrix.get_interaction_matrix_df().to_excel("interaction_matrix.xlsx")
98
+ huff_model = interaction_matrix.marketareas()
86
99
 
87
- interaction_matrix.marketareas()
100
+ print(interaction_matrix.get_interaction_matrix_df())
88
101
 
89
- interaction_matrix.summary()
102
+ print(huff_model.get_market_areas_df())
@@ -0,0 +1,52 @@
1
+ Metadata-Version: 2.1
2
+ Name: huff
3
+ Version: 1.1.1
4
+ Summary: huff: Huff Model Market Area Analysis
5
+ Author: Thomas Wieland
6
+ Author-email: geowieland@googlemail.com
7
+ Description-Content-Type: text/markdown
8
+ Requires-Dist: geopandas
9
+ Requires-Dist: pandas
10
+ Requires-Dist: numpy
11
+
12
+ # huff: Huff Model Market Area Analysis
13
+
14
+ ## Author
15
+
16
+ Thomas Wieland [ORCID](https://orcid.org/0000-0001-5168-9846) [EMail](mailto:geowieland@googlemail.com)
17
+
18
+ See the /tests directory for usage examples of most of the included functions.
19
+
20
+
21
+ ## Features
22
+
23
+ - **Huff Model**:
24
+ - Defining origins and destinations with weightings
25
+ - Creating interaction matrix from origins and destinations
26
+ - Calculating basic Huff Model
27
+ - **Multiplicative Competitive Interaction Model**:
28
+ - Log-centering transformation
29
+ - **OpenRouteService Client** (Tools via API):
30
+ - Creating transport costs matrix from origins and destinations
31
+ - Creating isochrones from destinations
32
+
33
+ Attribution of OpenRouteService:
34
+ © openrouteservice.org by HeiGIT | Map data © OpenStreetMap contributors
35
+ Visit https://openrouteservice.org/
36
+
37
+ ## Literature
38
+ - Huff DL (1962) *Determination of Intra-Urban Retail Trade Areas*.
39
+ - Huff DL (1964) Defining and estimating a trading area. *Journal of Marketing* 28(4): 34–38. [10.2307/1249154](https://doi.org/10.2307/1249154)
40
+ - Huff DL, McCallum BM (2008) Calibrating the Huff Model using ArcGIS Business Analyst. ESRI White Paper, September 2008. https://www.esri.com/library/whitepapers/pdfs/calibrating-huff-model.pdf.
41
+ - De Beule M, Van den Poel D, Van de Weghe N (2014) An extended Huff-model for robustly benchmarking and predicting retail network performance. *Applied Geography*,* 46(1): 80–89. [10.1016/j.apgeog.2013.09.026](https://doi.org/10.1016/j.apgeog.2013.09.026)
42
+ - Nakanishi M, Cooper LG (1974) Parameter estimation for a Multiplicative Competitive Interaction Model: Least squares approach. *Journal of Marketing Research* 11(3): 303–311. [10.2307/3151146](https://doi.org/10.2307/3151146).
43
+ - Wieland T (2017) Market Area Analysis for Retail and Service Locations with MCI. *R Journal* 9(1): 298-323. [10.32614/RJ-2017-020](https://doi.org/10.32614/RJ-2017-020)
44
+ - Wieland T (2018) A Hurdle Model Approach of Store Choice and Market Area Analysis in Grocery Retailing. *Papers in Applied Geography* 4(4): 370-389. [10.1080/23754931.2018.1519458](https://doi.org/10.1080/23754931.2018.1519458)
45
+
46
+
47
+ ## Installation
48
+
49
+ To install the package, use `pip`:
50
+
51
+ ```bash
52
+ pip install huff
@@ -1,9 +1,9 @@
1
1
  huff/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
2
- huff/gistools.py,sha256=pyNYiRG_PwjiVlOLY26zPDIGVHE79uwyz48kGQ4k9us,1625
3
- huff/models.py,sha256=UxFo2laIrNahWrcf-_4xVMO9eR_w7kIBeoG_w_eGD70,21476
4
- huff/ors.py,sha256=TF1US2Tc1L4ny6-S4JFIkDzgxxrtFsD_N9A-FQBabLM,9397
2
+ huff/gistools.py,sha256=YgkuVbNTupIDybGnLQZ4oeEjPhoFtXo132ehbdZ_thk,1625
3
+ huff/models.py,sha256=5_0ZBYz9Pa6ZA2MErJI4YwnZrcIDp3_K6V8UnWGrhuE,30258
4
+ huff/ors.py,sha256=JKJyEttW4LPVzv3xQhl6cRT8KQjrLaVNpbckeQdLDWE,11467
5
5
  huff/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
6
- huff/tests/tests_huff.py,sha256=6L16suvmcYtMBmgppHnhqJSCr6OI1YJh1Bh114ElfUc,2294
6
+ huff/tests/tests_huff.py,sha256=5csNqE-9_cdNU1wuOQ5CV5bni1S546e24QZo6XMsct0,2515
7
7
  huff/tests/data/Haslach.cpg,sha256=OtMDH1UDpEBK-CUmLugjLMBNTqZoPULF3QovKiesmCQ,5
8
8
  huff/tests/data/Haslach.dbf,sha256=GVPIt05OzDO7UrRDcsMhiYWvyXAPg6Z-qkiysFzj-fc,506
9
9
  huff/tests/data/Haslach.prj,sha256=2Jy1Vlzh7UxQ1MXpZ9UYLs2SxfrObj2xkEkZyLqmGTY,437
@@ -16,7 +16,7 @@ huff/tests/data/Haslach_supermarkets.prj,sha256=2Jy1Vlzh7UxQ1MXpZ9UYLs2SxfrObj2x
16
16
  huff/tests/data/Haslach_supermarkets.qmd,sha256=j9i4_Pz7ZMSG2UDSb3nuhJpw0KWXIRhiiDymqJP6_Fo,2479
17
17
  huff/tests/data/Haslach_supermarkets.shp,sha256=X7QbQ0BTMag_B-bDRbpr-go2BQIXo3Y8zMAKpYZmlps,324
18
18
  huff/tests/data/Haslach_supermarkets.shx,sha256=j23QHX-SmdAeN04rw0x8nUOran-OCg_T6r_LvzzEPWs,164
19
- huff-1.0.0.dist-info/METADATA,sha256=Xl1URHFyO5OqALQcLsBtENr-TNO5cm0b7xo8479hOyg,749
20
- huff-1.0.0.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
21
- huff-1.0.0.dist-info/top_level.txt,sha256=nlzX-PxZNFmIxANIJMySuIFPihd6qOBkRlhIC28NEsQ,5
22
- huff-1.0.0.dist-info/RECORD,,
19
+ huff-1.1.1.dist-info/METADATA,sha256=6aprnoJS3msu7tBJts1j2lBvb1UFriCnZj-uXPUaAZU,2539
20
+ huff-1.1.1.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
21
+ huff-1.1.1.dist-info/top_level.txt,sha256=nlzX-PxZNFmIxANIJMySuIFPihd6qOBkRlhIC28NEsQ,5
22
+ huff-1.1.1.dist-info/RECORD,,
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: setuptools (75.8.0)
2
+ Generator: bdist_wheel (0.45.1)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
5
5
 
@@ -1,32 +0,0 @@
1
- Metadata-Version: 2.2
2
- Name: huff
3
- Version: 1.0.0
4
- Summary: huff: Huff Model Market Area Analysis
5
- Author: Thomas Wieland
6
- Author-email: geowieland@googlemail.com
7
- Description-Content-Type: text/markdown
8
- Requires-Dist: geopandas
9
- Requires-Dist: pandas
10
- Requires-Dist: numpy
11
- Dynamic: author
12
- Dynamic: author-email
13
- Dynamic: description
14
- Dynamic: description-content-type
15
- Dynamic: requires-dist
16
- Dynamic: summary
17
-
18
- # huff: Huff Model Market Area Analysis
19
-
20
- ## Author
21
-
22
- Thomas Wieland [ORCID](https://orcid.org/0000-0001-5168-9846) [EMail](mailto:geowieland@googlemail.com)
23
-
24
- See the /tests directory for usage examples of most of the included functions.
25
-
26
-
27
- ## Installation
28
-
29
- To install the package, use `pip`:
30
-
31
- ```bash
32
- pip install huff