huff 1.2.0__py3-none-any.whl → 1.3.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
huff/gistools.py CHANGED
@@ -4,13 +4,15 @@
  # Author: Thomas Wieland
  # ORCID: 0000-0001-5168-9846
  # mail: geowieland@googlemail.com
- # Version: 1.2.0
- # Last update: 2025-05-14 18:28
+ # Version: 1.3.0
+ # Last update: 2025-05-22 05:44
  # Copyright (c) 2025 Thomas Wieland
  #-----------------------------------------------------------------------


  import geopandas as gp
+ import pandas as pd
+ from pandas.api.types import is_numeric_dtype
  from math import pi, sin, cos, acos

@@ -60,6 +62,65 @@ def distance_matrix(
      return matrix


+ def buffers(
+     point_gdf: gp.GeoDataFrame,
+     unique_id_col: str,
+     distances: list,
+     donut: bool = True,
+     save_output: bool = True,
+     output_filepath: str = "buffers.shp",
+     output_crs: str = "EPSG:4326"
+     ):
+
+     all_buffers_gdf = gp.GeoDataFrame(columns=[unique_id_col, "segment", "geometry"])
+
+     for idx, row in point_gdf.iterrows():
+
+         point_buffers = []
+
+         for distance in distances:
+
+             point = row["geometry"]
+             point_buffer = point.buffer(distance)
+
+             point_buffer_gdf = gp.GeoDataFrame(
+                 {
+                     unique_id_col: row[unique_id_col],
+                     "geometry": [point_buffer],
+                     "segment": [distance]
+                 },
+                 crs=point_gdf.crs
+                 )
+
+             point_buffers.append(point_buffer_gdf)
+
+         point_buffers_gdf = pd.concat(
+             point_buffers,
+             ignore_index = True
+             )
+
+         if donut:
+             point_buffers_gdf = overlay_difference(
+                 polygon_gdf = point_buffers_gdf,
+                 sort_col = "segment"
+                 )
+
+         all_buffers_gdf = pd.concat(
+             [
+                 all_buffers_gdf,
+                 point_buffers_gdf
+             ],
+             ignore_index = True)
+
+     all_buffers_gdf = all_buffers_gdf.to_crs(output_crs)
+
+     if save_output:
+         all_buffers_gdf.to_file(output_filepath)
+         print ("Saved as", output_filepath)
+
+     return all_buffers_gdf
+
+
  def overlay_difference(
      polygon_gdf: gp.GeoDataFrame,
      sort_col: str = None,
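
A minimal usage sketch for the new buffers() function, following the signature above. The shapefile path and ID column are hypothetical, and distances are interpreted in the units of the input CRS, so a projected (metric) CRS is assumed:

import geopandas as gp
from huff.gistools import buffers

stores = gp.read_file("stores.shp").to_crs("EPSG:31467")  # hypothetical point layer, metric CRS
rings = buffers(
    point_gdf = stores,
    unique_id_col = "store_id",   # hypothetical unique ID column
    distances = [500, 1000],      # ring radii in metres
    donut = True,                 # non-overlapping rings instead of nested discs
    save_output = False
    )
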
@@ -74,6 +135,7 @@ def overlay_difference(
      new_data = []

      for i in range(len(polygon_gdf) - 1, 0, -1):
+
          current_polygon = polygon_gdf.iloc[i].geometry
          previous_polygon = polygon_gdf.iloc[i - 1].geometry
          difference_polygon = current_polygon.difference(previous_polygon)
@@ -85,7 +147,9 @@
          new_data.append(polygon_gdf.iloc[i].drop("geometry"))

      inner_most_polygon = polygon_gdf.iloc[0].geometry
+
      if inner_most_polygon.is_valid:
+
          new_geometries.append(inner_most_polygon)
          new_data.append(polygon_gdf.iloc[0].drop("geometry"))
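
With donut=True, buffers() delegates to overlay_difference(), which subtracts each smaller buffer from the next larger one so that the segments do not overlap. The core subtraction, illustrated standalone with shapely and hypothetical radii:

from shapely.geometry import Point

inner = Point(0, 0).buffer(500)
outer = Point(0, 0).buffer(1000)
ring = outer.difference(inner)  # donut segment between 500 and 1000 units
print(round(ring.area) == round(outer.area - inner.area))  # True
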
@@ -93,4 +157,60 @@
          new_data, geometry=new_geometries, crs=polygon_gdf.crs
          )

-     return polygon_gdf_difference
+     return polygon_gdf_difference
+
+
+ def point_spatial_join(
+     polygon_gdf: gp.GeoDataFrame,
+     point_gdf: gp.GeoDataFrame,
+     join_type: str = "inner",
+     polygon_ref_cols: list = [],
+     point_stat_col: str = None
+     ):
+
+     if polygon_gdf.crs != point_gdf.crs:
+         raise ValueError (f"Coordinate reference systems of polygon and point data do not match. Polygons: {str(polygon_gdf.crs)}, points: {str(point_gdf.crs)}")
+
+     if polygon_ref_cols != []:
+         for polygon_ref_col in polygon_ref_cols:
+             if polygon_ref_col not in polygon_gdf.columns:
+                 raise KeyError (f"Column {polygon_ref_col} not in polygon data")
+
+     if point_stat_col is not None:
+         if point_stat_col not in point_gdf.columns:
+             raise KeyError (f"Column {point_stat_col} not in point data")
+         if not is_numeric_dtype(point_gdf[point_stat_col]):
+             raise TypeError (f"Column {point_stat_col} is not numeric")
+
+     shp_points_gdf_join = point_gdf.sjoin(
+         polygon_gdf,
+         how=join_type
+         )
+
+     spatial_join_stat = None
+
+     if polygon_ref_cols != [] and point_stat_col is not None:
+         shp_points_gdf_join_count = shp_points_gdf_join.groupby(polygon_ref_cols)[point_stat_col].count()
+         shp_points_gdf_join_sum = shp_points_gdf_join.groupby(polygon_ref_cols)[point_stat_col].sum()
+         shp_points_gdf_join_min = shp_points_gdf_join.groupby(polygon_ref_cols)[point_stat_col].min()
+         shp_points_gdf_join_max = shp_points_gdf_join.groupby(polygon_ref_cols)[point_stat_col].max()
+         shp_points_gdf_join_mean = shp_points_gdf_join.groupby(polygon_ref_cols)[point_stat_col].mean()
+
+         shp_points_gdf_join_count = shp_points_gdf_join_count.rename("count").to_frame()
+         shp_points_gdf_join_sum = shp_points_gdf_join_sum.rename("sum").to_frame()
+         shp_points_gdf_join_min = shp_points_gdf_join_min.rename("min").to_frame()
+         shp_points_gdf_join_max = shp_points_gdf_join_max.rename("max").to_frame()
+         shp_points_gdf_join_mean = shp_points_gdf_join_mean.rename("mean").to_frame()
+         spatial_join_stat = shp_points_gdf_join_count.join(
+             [
+                 shp_points_gdf_join_sum,
+                 shp_points_gdf_join_min,
+                 shp_points_gdf_join_max,
+                 shp_points_gdf_join_mean
+             ]
+             )
+
+     return [
+         shp_points_gdf_join,
+         spatial_join_stat
+         ]
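
A sketch of how point_spatial_join() might be called, using tiny hypothetical layers. Both inputs must share a CRS; the returned list holds the joined points and, because polygon_ref_cols and point_stat_col are both set, a count/sum/min/max/mean table per polygon:

import geopandas as gp
from shapely.geometry import Point, box
from huff.gistools import point_spatial_join

districts = gp.GeoDataFrame(
    {"district": ["A", "B"]},
    geometry = [box(0, 0, 1, 1), box(1, 0, 2, 1)],
    crs = "EPSG:31467"
    )
stores = gp.GeoDataFrame(
    {"sales_area": [400.0, 800.0, 1200.0]},  # hypothetical numeric column
    geometry = [Point(0.5, 0.5), Point(0.6, 0.4), Point(1.5, 0.5)],
    crs = "EPSG:31467"
    )

joined, stats = point_spatial_join(
    polygon_gdf = districts,
    point_gdf = stores,
    polygon_ref_cols = ["district"],
    point_stat_col = "sales_area"
    )
print(stats)  # one row per district with count, sum, min, max, mean
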
huff/models.py CHANGED
@@ -4,8 +4,8 @@
  # Author: Thomas Wieland
  # ORCID: 0000-0001-5168-9846
  # mail: geowieland@googlemail.com
- # Version: 1.2.0
- # Last update: 2025-05-14 18:33
+ # Version: 1.3.0
+ # Last update: 2025-05-22 05:45
  # Copyright (c) 2025 Thomas Wieland
  #-----------------------------------------------------------------------

@@ -18,7 +18,7 @@ from statsmodels.formula.api import ols
  from shapely.geometry import Point
  from shapely import wkt
  from huff.ors import Client, TimeDistanceMatrix, Isochrone
- from huff.gistools import overlay_difference, distance_matrix
+ from huff.gistools import overlay_difference, distance_matrix, buffers


  class CustomerOrigins:
@@ -28,13 +28,15 @@ class CustomerOrigins:
          geodata_gpd,
          geodata_gpd_original,
          metadata,
-         isochrones_gdf
+         isochrones_gdf,
+         buffers_gdf
          ):

          self.geodata_gpd = geodata_gpd
          self.geodata_gpd_original = geodata_gpd_original
          self.metadata = metadata
          self.isochrones_gdf = isochrones_gdf
+         self.buffers_gdf = buffers_gdf

      def get_geodata_gpd(self):

@@ -52,6 +54,10 @@ class CustomerOrigins:

          return self.isochrones_gdf

+     def get_buffers(self):
+
+         return self.buffers_gdf
+
      def summary(self):

          metadata = self.metadata
@@ -77,6 +83,11 @@ class CustomerOrigins:
          else:
              print("Including isochrones YES")

+         if self.buffers_gdf is None:
+             print("Including buffers NO")
+         else:
+             print("Including buffers YES")
+
          return metadata

      def define_marketsize(
@@ -151,6 +162,32 @@ class CustomerOrigins:

          return self

+     def buffers(
+         self,
+         segments_distance: list = [500, 1000],
+         donut: bool = True,
+         save_output: bool = True,
+         output_filepath: str = "customer_origins_buffers.shp",
+         output_crs: str = "EPSG:4326"
+         ):
+
+         geodata_gpd_original = self.get_geodata_gpd_original()
+         metadata = self.metadata
+
+         buffers_gdf = buffers(
+             point_gdf = geodata_gpd_original,
+             unique_id_col = metadata["unique_id"],
+             distances = segments_distance,
+             donut = donut,
+             save_output = save_output,
+             output_filepath = output_filepath,
+             output_crs = output_crs
+             )
+
+         self.buffers_gdf = buffers_gdf
+
+         return self
+

  class SupplyLocations:
@@ -159,13 +196,15 @@ class SupplyLocations:
          geodata_gpd,
          geodata_gpd_original,
          metadata,
-         isochrones_gdf
+         isochrones_gdf,
+         buffers_gdf
          ):

          self.geodata_gpd = geodata_gpd
          self.geodata_gpd_original = geodata_gpd_original
          self.metadata = metadata
          self.isochrones_gdf = isochrones_gdf
+         self.buffers_gdf = buffers_gdf

      def get_geodata_gpd(self):

@@ -182,6 +221,10 @@ class SupplyLocations:
      def get_isochrones_gdf(self):

          return self.isochrones_gdf
+
+     def get_buffers_gdf(self):
+
+         return self.buffers_gdf

      def summary(self):

@@ -346,6 +389,32 @@ class SupplyLocations:

          return self

+     def buffers(
+         self,
+         segments_distance: list = [500, 1000],
+         donut: bool = True,
+         save_output: bool = True,
+         output_filepath: str = "supply_locations_buffers.shp",
+         output_crs: str = "EPSG:4326"
+         ):
+
+         geodata_gpd_original = self.get_geodata_gpd_original()
+         metadata = self.metadata
+
+         buffers_gdf = buffers(
+             point_gdf = geodata_gpd_original,
+             unique_id_col = metadata["unique_id"],
+             distances = segments_distance,
+             donut = donut,
+             save_output = save_output,
+             output_filepath = output_filepath,
+             output_crs = output_crs
+             )
+
+         self.buffers_gdf = buffers_gdf
+
+         return self
+

  class InteractionMatrix:
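
CustomerOrigins.buffers() and SupplyLocations.buffers() are thin wrappers around gistools.buffers(), taking the unique ID column from the object's metadata and storing the result on the object. A usage sketch, assuming origins is a CustomerOrigins object created via load_geodata() (see huff/tests/tests_huff.py); note that the getter is get_buffers() on CustomerOrigins but get_buffers_gdf() on SupplyLocations:

origins = origins.buffers(
    segments_distance = [500, 1000],  # ring radii in CRS units
    donut = True,
    save_output = False
    )
rings_gdf = origins.get_buffers()
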
@@ -459,14 +528,10 @@
          range_type = transport_costs_matrix_config["range_type"]

          transport_costs_matrix["source"] = transport_costs_matrix["source"].astype(int)
-         transport_costs_matrix["source"] = transport_costs_matrix["source"].map(
-             dict(enumerate(customer_origins_ids))
-             )
+         transport_costs_matrix["source"] = transport_costs_matrix["source"].map(dict(enumerate(customer_origins_ids)))

          transport_costs_matrix["destination"] = transport_costs_matrix["destination"].astype(int)
-         transport_costs_matrix["destination"] = transport_costs_matrix["destination"].map(
-             dict(enumerate(supply_locations_ids))
-             )
+         transport_costs_matrix["destination"] = transport_costs_matrix["destination"].map(dict(enumerate(supply_locations_ids)))

          transport_costs_matrix["source_destination"] = transport_costs_matrix["source"].astype(str)+"_"+transport_costs_matrix["destination"].astype(str)
          transport_costs_matrix = transport_costs_matrix[["source_destination", range_type]]
@@ -1080,16 +1145,20 @@ def load_geodata (
      ):

      if location_type is None or (location_type != "origins" and location_type != "destinations"):
-         raise ValueError ("location_type must be either 'origins' or 'destinations'")
+         raise ValueError ("Argument location_type must be either 'origins' or 'destinations'")

      if isinstance(data, gp.GeoDataFrame):
          geodata_gpd_original = data
+         if not all(geodata_gpd_original.geometry.geom_type == "Point"):
+             raise ValueError ("Input geopandas.GeoDataFrame must be of type 'Point'")
          crs_input = geodata_gpd_original.crs
      elif isinstance(data, pd.DataFrame):
          geodata_tab = data
      elif isinstance(data, str):
          if data_type == "shp":
              geodata_gpd_original = gp.read_file(data)
+             if not all(geodata_gpd_original.geometry.geom_type == "Point"):
+                 raise ValueError ("Input shapefile must be of type 'Point'")
              crs_input = geodata_gpd_original.crs
          elif data_type == "csv" or data_type == "xlsx":
              if x_col is None:
@@ -1111,6 +1180,12 @@ def load_geodata (
          raise TypeError("data must be pandas.DataFrame, geopandas.GeoDataFrame or file (.csv, .xlsx, .shp)")

      if data_type == "csv" or data_type == "xlsx" or (isinstance(data, pd.DataFrame) and not isinstance(data, gp.GeoDataFrame)):
+
+         check_vars(
+             df = geodata_tab,
+             cols = [x_col, y_col]
+             )
+
          geodata_gpd_original = gp.GeoDataFrame(
              geodata_tab,
              geometry = gp.points_from_xy(
@@ -1146,6 +1221,7 @@ def load_geodata (
              geodata_gpd,
              geodata_gpd_original,
              metadata,
+             None,
              None
              )
      elif location_type == "destinations":
@@ -1153,6 +1229,7 @@
              geodata_gpd,
              geodata_gpd_original,
              metadata,
+             None,
              None
              )

@@ -1349,7 +1426,8 @@ def load_interaction_matrix(
          geodata_gpd = customer_origins_geodata_gpd,
          geodata_gpd_original = customer_origins_geodata_original_tab,
          metadata = customer_origins_metadata,
-         isochrones_gdf = None
+         isochrones_gdf = None,
+         buffers_gdf = None
          )

      if supply_locations_coords_col is not None:
@@ -1415,7 +1493,8 @@
          geodata_gpd = supply_locations_geodata_gpd,
          geodata_gpd_original = supply_locations_geodata_original_tab,
          metadata = supply_locations_metadata,
-         isochrones_gdf = None
+         isochrones_gdf = None,
+         buffers_gdf = None
          )

      interaction_matrix_df = interaction_matrix_df.rename(
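
The reflowed .map(dict(enumerate(...))) calls above translate the positional source/destination indices returned by ORS back into user-supplied IDs. The idiom in isolation, with hypothetical IDs:

import pandas as pd

ids = ["A01", "B02", "C03"]       # origin IDs, in request order
source = pd.Series([0, 2, 1, 0])  # positional indices as returned by ORS
print(source.map(dict(enumerate(ids))).tolist())  # ['A01', 'C03', 'B02', 'A01']
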
huff/ors.py CHANGED
@@ -4,8 +4,8 @@
  # Author: Thomas Wieland
  # ORCID: 0000-0001-5168-9846
  # mail: geowieland@googlemail.com
- # Version: 1.2.0
- # Last update: 2025-05-14 18:28
+ # Version: 1.2.1
+ # Last update: 2025-05-22 05:46
  # Copyright (c) 2025 Thomas Wieland
  #-----------------------------------------------------------------------

@@ -162,9 +162,9 @@ class Client:
                  timeout=timeout
                  )

-         except:
+         except Exception as e:

-             print ("Unknown error while accessing ORS server")
+             print ("Error while accessing ORS server: ", str(e))

              status_code = 99999
              isochrones_gdf = None
@@ -212,9 +212,9 @@ class Client:
              print ("Saved as", output_filepath)

          else:
-
-             print ("Error while accessing ORS server. Status Code: " + str(status_code))
-
+
+             print(f"Error while accessing ORS server. Status code: {status_code} - {response.reason}")
+
          isochrones_gdf = None
          metadata = None
@@ -233,14 +233,14 @@ class Client:
          sources: list = [],
          destinations: list = [],
          id: str = None,
-         range_type = "time",
-         profile = "driving-car",
+         range_type: str = "time",
+         profile: str = "driving-car",
          metrics: list = [],
          resolve_locations: bool = False,
          units: str = "mi",
-         timeout = 10,
-         save_output = False,
-         output_filepath = "matrix.csv",
+         timeout: int = 10,
+         save_output: bool = False,
+         output_filepath: str = "matrix.csv",
          csv_sep = ";",
          csv_decimal = ",",
          csv_encoding = None
@@ -285,9 +285,9 @@ class Client:
                  timeout=timeout
                  )

-         except:
-
-             print ("Unknown error while accessing ORS server")
+         except Exception as e:
+
+             print ("Error while accessing ORS server: ", str(e))

              status_code = 99999
              matrix_df = None
@@ -368,7 +368,7 @@

          else:

-             print ("Error in accessing ORS server. Status Code: " + str(status_code))
+             print(f"Error while accessing ORS server. Status code: {status_code} - {response.reason}")

              matrix_df = None
              metadata = None
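
The switch from a bare except: to except Exception as e: surfaces the actual failure reason (timeout, DNS error, connection refused) instead of a generic message, and no longer swallows KeyboardInterrupt or SystemExit. The pattern in isolation, with a hypothetical URL:

import requests

try:
    response = requests.get("https://example.invalid/v2/matrix", timeout=10)
    status_code = response.status_code
except Exception as e:
    print ("Error while accessing ORS server: ", str(e))
    status_code = 99999  # sentinel value used by huff.ors for failed requests
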
huff/osm.py ADDED
@@ -0,0 +1,207 @@
+ #-----------------------------------------------------------------------
+ # Name: osm (huff package)
+ # Purpose: Helper functions for OpenStreetMap API
+ # Author: Thomas Wieland
+ # ORCID: 0000-0001-5168-9846
+ # mail: geowieland@googlemail.com
+ # Version: 1.0.0
+ # Last update: 2025-05-22 05:46
+ # Copyright (c) 2025 Thomas Wieland
+ #-----------------------------------------------------------------------
+
+
+ import pandas as pd
+ import geopandas as gpd
+ import math
+ import requests
+ import tempfile
+ import time
+ import os
+ from PIL import Image
+ import matplotlib.pyplot as plt
+ import contextily as cx
+ from shapely.geometry import box
+
+
+ class Client:
+
+     def __init__(
+         self,
+         server = "http://a.tile.openstreetmap.org/",
+         headers = {
+             'User-Agent': 'huff.osm/1.0.0 (your_name@your_email_provider.com)'
+             }
+         ):
+
+         self.server = server
+         self.headers = headers
+
+     def download_tile(
+         self,
+         zoom,
+         x,
+         y,
+         timeout = 10
+         ):
+
+         osm_url = self.server + f"{zoom}/{x}/{y}.png"
+
+         response = requests.get(
+             osm_url,
+             headers = self.headers,
+             timeout = timeout
+             )
+
+         if response.status_code == 200:
+
+             with tempfile.NamedTemporaryFile(delete=False, suffix='.png') as tmp_file:
+                 tmp_file.write(response.content)
+                 tmp_file_path = tmp_file.name
+                 return Image.open(tmp_file_path)
+
+         else:
+
+             print(f"Error while accessing OSM server. Status code: {response.status_code} - {response.reason}")
+
+             return None
+
+
+ def get_basemap(
+     sw_lat,
+     sw_lon,
+     ne_lat,
+     ne_lon,
+     zoom = 15
+     ):
+
+     def lat_lon_to_tile(
+         lat,
+         lon,
+         zoom
+         ):
+
+         n = 2 ** zoom
+         x = int(n * ((lon + 180) / 360))
+         y = int(n * (1 - (math.log(math.tan(math.radians(lat)) + 1 / math.cos(math.radians(lat))) / math.pi)) / 2)
+         return x, y
+
+     def stitch_tiles(
+         zoom,
+         sw_lat,
+         sw_lon,
+         ne_lat,
+         ne_lon,
+         delay = 0.1
+         ):
+
+         osm_client = Client(
+             server = "http://a.tile.openstreetmap.org/"
+             )
+
+         sw_x_tile, sw_y_tile = lat_lon_to_tile(sw_lat, sw_lon, zoom)
+         ne_x_tile, ne_y_tile = lat_lon_to_tile(ne_lat, ne_lon, zoom)
+
+         tile_size = 256
+         width = (ne_x_tile - sw_x_tile + 1) * tile_size
+         height = (sw_y_tile - ne_y_tile + 1) * tile_size
+
+         stitched_image = Image.new('RGB', (width, height))
+
+         for x in range(sw_x_tile, ne_x_tile + 1):
+             for y in range(ne_y_tile, sw_y_tile + 1):
+                 tile = osm_client.download_tile(
+                     zoom = zoom,
+                     x = x,
+                     y = y
+                     )
+                 if tile:
+
+                     stitched_image.paste(tile, ((x - sw_x_tile) * tile_size, (sw_y_tile - y) * tile_size))
+                 else:
+                     print(f"Error while retrieving tile {x}, {y}.")
+
+                 time.sleep(delay)
+
+         return stitched_image
+
+     stitched_image = stitch_tiles(zoom, sw_lat, sw_lon, ne_lat, ne_lon)
+
+     if stitched_image:
+
+         stitched_image_path = "osm_map.png"
+         stitched_image.save(stitched_image_path)
+
+     else:
+         print("Error while building stitched images")
+
+
+ def map_with_basemap(
+     layers: list,
+     osm_basemap: bool = True,
+     zoom: int = 15,
+     styles: dict = {},
+     save_output: bool = True,
+     output_filepath: str = "osm_map_with_basemap.png",
+     output_dpi = 300
+     ):
+
+     if not layers:
+         raise ValueError("List layers is empty")
+
+     combined = gpd.GeoDataFrame(
+         pd.concat(
+             layers,
+             ignore_index = True
+             ),
+         crs = layers[0].crs
+         )
+
+     combined_wgs84 = combined.to_crs(epsg=4326)
+     bounds = combined_wgs84.total_bounds
+
+     sw_lon, sw_lat, ne_lon, ne_lat = bounds[0]*0.9999, bounds[1]*0.9999, bounds[2]*1.0001, bounds[3]*1.0001
+
+     if osm_basemap:
+
+         get_basemap(sw_lat, sw_lon, ne_lat, ne_lon, zoom=zoom)
+
+     fig, ax = plt.subplots(figsize=(10, 10))
+
+     if osm_basemap:
+
+         img = Image.open("osm_map.png")
+         extent_img = [sw_lon, ne_lon, sw_lat, ne_lat]
+         ax.imshow(img, extent=extent_img, origin="upper")
+
+     for layer in layers:
+         layer_3857 = layer.to_crs(epsg=3857)
+         layer_3857.plot(ax=ax, alpha=0.6)
+
+     bbox = box(sw_lon, sw_lat, ne_lon, ne_lat)
+     extent_geom = gpd.GeoSeries([bbox], crs=4326).to_crs(epsg=3857).total_bounds
+
+     ax.set_xlim(extent_geom[0], extent_geom[2])
+     ax.set_ylim(extent_geom[1], extent_geom[3])
+
+     if osm_basemap:
+         cx.add_basemap(
+             ax,
+             source=cx.providers.OpenStreetMap.Mapnik,
+             zoom=zoom
+             )
+
+     plt.axis('off')
+     plt.show()
+
+     if save_output:
+
+         plt.savefig(
+             output_filepath,
+             dpi = output_dpi,
+             bbox_inches="tight"
+             )
+
+     plt.close()
+
+     if os.path.exists("osm_map.png"):
+         os.remove("osm_map.png")
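
A sketch of calling the new map_with_basemap() helper with two hypothetical layers. All layers are concatenated to derive the map extent (so they should share a CRS), and each is reprojected to EPSG:3857 for plotting over the OSM basemap:

import geopandas as gpd
from huff.osm import map_with_basemap

districts = gpd.read_file("districts.shp")  # hypothetical polygon layer
stores = gpd.read_file("stores.shp")        # hypothetical point layer

map_with_basemap(
    layers = [districts, stores],  # drawn in list order
    zoom = 15,                     # OSM tile zoom level
    save_output = True,
    output_filepath = "map.png"
    )
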
huff/tests/tests_huff.py CHANGED
@@ -4,13 +4,16 @@
  # Author: Thomas Wieland
  # ORCID: 0000-0001-5168-9846
  # mail: geowieland@googlemail.com
- # Version: 1.2.0
- # Last update: 2025-05-14 18:33
+ # Version: 1.3.0
+ # Last update: 2025-05-22 05:41
  # Copyright (c) 2025 Thomas Wieland
  #-----------------------------------------------------------------------


  from huff.models import create_interaction_matrix, get_isochrones, load_geodata, load_interaction_matrix
+ from huff.osm import map_with_basemap
+ from huff.gistools import buffers, point_spatial_join
+

  # Customer origins (statistical districts):

@@ -21,6 +24,14 @@ Haslach = load_geodata(
      )
  # Loading customer origins (shapefile)

+ Haslach_buf = Haslach.buffers(
+     segments_distance=[500,1000,1500],
+     save_output=True,
+     output_filepath="Haslach_buf.shp",
+     output_crs="EPSG:31467"
+     )
+ # Buffers for customer origins
+
  Haslach.summary()
  # Summary of customer origins

@@ -56,22 +67,24 @@ Haslach_supermarkets.define_attraction_weighting(
      )
  # Define attraction weighting (gamma)

- # Haslach_supermarkets.isochrones(
- #     segments_minutes=[5, 10, 15],
- #     profile = "driving-car",
- #     save_output=True,
- #     ors_auth="5b3ce3597851110001cf62480a15aafdb5a64f4d91805929f8af6abd",
- #     output_filepath="Haslach_supermarkets_iso.shp"
- #     )
- # # Obtaining isochrones for driving by car (5, 10 and 15 minutes)
+ Haslach_supermarkets.isochrones(
+     segments_minutes=[5, 10],
+     profile = "foot-walking",
+     save_output=True,
+     ors_auth="5b3ce3597851110001cf62480a15aafdb5a64f4d91805929f8af6abd",
+     output_filepath="Haslach_supermarkets_iso.shp",
+     output_crs="EPSG:31467"
+     )
+ # Obtaining isochrones for walking (5 and 10 minutes)
+ # ORS API documentation: https://openrouteservice.org/dev/#/api-docs/v2/

- # Haslach_supermarkets.summary()
- # # Summary of updated customer origins
+ Haslach_supermarkets.summary()
+ # Summary of updated supply locations

- # Haslach_supermarkets_isochrones = Haslach_supermarkets.get_isochrones_gdf()
- # # Extracting isochrones
+ Haslach_supermarkets_isochrones = Haslach_supermarkets.get_isochrones_gdf()
+ # Extracting isochrones

- # print(Haslach_supermarkets_isochrones)
+ print(Haslach_supermarkets_isochrones)


  # Using customer origins and supply locations for building interaction matrix:
@@ -83,10 +96,11 @@ haslach_interactionmatrix = create_interaction_matrix(
  # Creating interaction matrix

  interaction_matrix = haslach_interactionmatrix.transport_costs(
-     #ors_auth="5b3ce3597851110001cf62480a15aafdb5a64f4d91805929f8af6abd"
-     network=False
+     ors_auth="5b3ce3597851110001cf62480a15aafdb5a64f4d91805929f8af6abd"
+     #network=False
      )
  # Obtaining transport costs (default: driving-car)
+ # ORS API documentation: https://openrouteservice.org/dev/#/api-docs/v2/

  interaction_matrix = interaction_matrix.flows()
  # Calculating spatial flows
@@ -120,6 +134,7 @@ mci_fit.get_market_areas_df()


  # Loading own interaction matrix:
+ # Data source: Wieland 2015 (https://nbn-resolving.org/urn:nbn:de:bvb:20-opus-180753)

  Wieland2015_interaction_matrix = load_interaction_matrix(
      data="data/Wieland2015.xlsx",
@@ -134,7 +149,6 @@ Wieland2015_interaction_matrix = load_interaction_matrix(
      probabilities_col="MA",
      data_type="xlsx"
      )
- # Data source: Wieland 2015 (https://nbn-resolving.org/urn:nbn:de:bvb:20-opus-180753)

  Wieland2015_interaction_matrix.summary()
  # Summary of interaction matrix
@@ -150,4 +164,44 @@ Wieland2015_fit = Wieland2015_interaction_matrix.mci_fit(
  # Fitting MCI model with four independent variables

  Wieland2015_fit.summary()
- # MCI model summary
+ # MCI model summary
+
+
+ # Buffer analysis:
+
+ Haslach_supermarkets_gdf = Haslach_supermarkets.get_geodata_gpd_original()
+ Haslach_buffers = Haslach_buf.get_buffers()
+ # Extracting points and buffer polygons
+
+ spj_test = point_spatial_join(
+     polygon_gdf = Haslach_buffers,
+     point_gdf = Haslach_supermarkets_gdf,
+     polygon_ref_cols = ["BEZEICHN", "segment"],
+     point_stat_col = "VKF_qm"
+     )
+ # Spatial join with buffers and points
+ # Statistics for supermarkets by statistical districts
+
+ spj_test[0].to_file("spj_test.shp")
+ # Save joined points as shapefile
+
+ print(spj_test[1])
+ # Showing df with overlay statistics
+
+
+ # Creating map:
+
+ Haslach_gdf = Haslach.get_geodata_gpd_original()
+ Haslach_supermarkets_gdf = Haslach_supermarkets.get_geodata_gpd_original()
+ Haslach_supermarkets_gdf_iso = Haslach_supermarkets.get_isochrones_gdf()
+ # Extracting geopandas.GeoDataFrames
+
+ map_with_basemap(
+     layers = [
+         Haslach_supermarkets_gdf_iso,
+         Haslach_gdf,
+         Haslach_supermarkets_gdf
+         ],
+     output_filepath = "Haslach_map.png"
+     )
+ # Map with three layers and OSM basemap
huff-1.2.0.dist-info/METADATA → huff-1.3.0.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: huff
- Version: 1.2.0
+ Version: 1.3.0
  Summary: huff: Huff Model Market Area Analysis
  Author: Thomas Wieland
  Author-email: geowieland@googlemail.com
@@ -8,10 +8,13 @@ Description-Content-Type: text/markdown
  Requires-Dist: geopandas
  Requires-Dist: pandas
  Requires-Dist: numpy
- Requires-Dist: time
  Requires-Dist: statsmodels
  Requires-Dist: shapely
- Requires-Dist: request
+ Requires-Dist: requests
+ Requires-Dist: matplotlib
+ Requires-Dist: pillow
+ Requires-Dist: contextily
+ Requires-Dist: openpyxl

  # huff: Huff Model Market Area Analysis

@@ -27,28 +30,39 @@ See the /tests directory for usage examples of most of the included functions.
  - **Huff Model**:
    - Defining origins and destinations with weightings
    - Creating interaction matrix from origins and destinations
-   - Calculating basic Huff Model
+   - Market simulation with basic Huff Model
  - **Multiplicative Competitive Interaction Model**:
-   - Log-centering transformation
-   - Fitting MCI model with >= independent variables
- - **OpenRouteService Client** (1) (Tools via API):
-   - Creating transport costs matrix from origins and destinations
-   - Creating isochrones from destinations
- - **Tools**:
+   - Log-centering transformation of interaction matrix
+   - Fitting MCI model with >= 2 independent variables
+   - MCI model market simulation
+ - **GIS tools**:
+   - OpenRouteService Client (1):
+     - Creating transport costs matrix from origins and destinations
+     - Creating isochrones from origins and destinations
+   - OpenStreetMap Client (2):
+     - Creating simple maps with OSM basemap
+   - Other GIS tools:
+     - Creating buffers from geodata
+     - Spatial join with statistics
+     - Creating Euclidean distance matrix from origins and destinations
+     - Overlay-difference analysis of polygons
+ - **Data management tools**:
    - Loading own interaction matrix for analysis
-   - GIS tools for preparing geodata
+   - Creating origins/destinations objects from point geodata

- (1) Attribution of OpenRouteService:
- © openrouteservice.org by HeiGIT | Map data © OpenStreetMap contributors | https://openrouteservice.org/
+ (1) © openrouteservice.org by HeiGIT | Map data © OpenStreetMap contributors | https://openrouteservice.org/
+ (2) © OpenStreetMap contributors | available under the Open Database License | https://www.openstreetmap.org/

  ## Literature
  - Huff DL (1962) *Determination of Intra-Urban Retail Trade Areas*.
  - Huff DL (1964) Defining and estimating a trading area. *Journal of Marketing* 28(4): 34–38. [10.2307/1249154](https://doi.org/10.2307/1249154)
  - Huff DL, McCallum BM (2008) Calibrating the Huff Model using ArcGIS Business Analyst. ESRI White Paper, September 2008. https://www.esri.com/library/whitepapers/pdfs/calibrating-huff-model.pdf.
- - De Beule M, Van den Poel D, Van de Weghe N (2014) An extended Huff-model for robustly benchmarking and predicting retail network performance. *Applied Geography*,* 46(1): 80–89. [10.1016/j.apgeog.2013.09.026](https://doi.org/10.1016/j.apgeog.2013.09.026)
+ - De Beule M, Van den Poel D, Van de Weghe N (2014) An extended Huff-model for robustly benchmarking and predicting retail network performance. *Applied Geography* 46(1): 80–89. [10.1016/j.apgeog.2013.09.026](https://doi.org/10.1016/j.apgeog.2013.09.026)
  - Nakanishi M, Cooper LG (1974) Parameter estimation for a Multiplicative Competitive Interaction Model: Least squares approach. *Journal of Marketing Research* 11(3): 303–311. [10.2307/3151146](https://doi.org/10.2307/3151146).
+ - Nakanishi M, Cooper LG (1982) Technical Note — Simplified Estimation Procedures for MCI Models. *Marketing Science* 1(3): 314–322. [10.1287/mksc.1.3.314](https://doi.org/10.1287/mksc.1.3.314)
  - Wieland T (2017) Market Area Analysis for Retail and Service Locations with MCI. *R Journal* 9(1): 298–323. [10.32614/RJ-2017-020](https://doi.org/10.32614/RJ-2017-020)
  - Wieland T (2018) A Hurdle Model Approach of Store Choice and Market Area Analysis in Grocery Retailing. *Papers in Applied Geography* 4(4): 370–389. [10.1080/23754931.2018.1519458](https://doi.org/10.1080/23754931.2018.1519458)
+ - Wieland T (2023) Spatial shopping behavior during the Corona pandemic: insights from a micro-econometric store choice model for consumer electronics and furniture retailing in Germany. *Journal of Geographical Systems* 25(2): 291–326. [10.1007/s10109-023-00408-x](https://doi.org/10.1007/s10109-023-00408-x)


  ## Installation
huff-1.2.0.dist-info/RECORD → huff-1.3.0.dist-info/RECORD RENAMED
@@ -1,9 +1,10 @@
  huff/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- huff/gistools.py,sha256=hfTP8zEQwlPMp505IgEl9uT5nJczBABxVsQ5MVE8fIU,2721
- huff/models.py,sha256=ilDSe9jdpOz75GJ3m8fKl2JOUpIOXv1pM2HVgj5_TTs,57180
- huff/ors.py,sha256=fyh-82nddvwCVpn1nZ3J0pZa6VrshUDeZSxm0aXvSiI,11467
+ huff/gistools.py,sha256=yEinx-rq9o8yXW6CgVIVsb1Rwi0WKnvCDRGD52h8jxo,6857
+ huff/models.py,sha256=hqR3jcFBD2h3SPX_U7YApQrql7yBUw2frmLF4uhVTfI,59645
+ huff/ors.py,sha256=D38PN2Az9kUbAqsFpb4W5Z5pSkbKSOH1W5IBlhos7ZA,11570
+ huff/osm.py,sha256=mj9FFxieMd5Gv8Nn72nt89dHSfzZYGQt2dYHOWfx6hY,5464
  huff/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- huff/tests/tests_huff.py,sha256=hsB81I9aJGuBT-NHYrV62U6UB6lWAV13WbOM3tSRvu4,4039
+ huff/tests/tests_huff.py,sha256=kWUob6X7DvznSo6Nh9yaffkbdEt7XUK1s-JOVZBLz2g,5588
  huff/tests/data/Haslach.cpg,sha256=OtMDH1UDpEBK-CUmLugjLMBNTqZoPULF3QovKiesmCQ,5
  huff/tests/data/Haslach.dbf,sha256=GVPIt05OzDO7UrRDcsMhiYWvyXAPg6Z-qkiysFzj-fc,506
  huff/tests/data/Haslach.prj,sha256=2Jy1Vlzh7UxQ1MXpZ9UYLs2SxfrObj2xkEkZyLqmGTY,437
@@ -17,7 +18,7 @@ huff/tests/data/Haslach_supermarkets.qmd,sha256=j9i4_Pz7ZMSG2UDSb3nuhJpw0KWXIRhi
  huff/tests/data/Haslach_supermarkets.shp,sha256=X7QbQ0BTMag_B-bDRbpr-go2BQIXo3Y8zMAKpYZmlps,324
  huff/tests/data/Haslach_supermarkets.shx,sha256=j23QHX-SmdAeN04rw0x8nUOran-OCg_T6r_LvzzEPWs,164
  huff/tests/data/Wieland2015.xlsx,sha256=SaVM-Hi5dBTmf2bzszMnZ2Ec8NUE05S_5F2lQj0ayS0,19641
- huff-1.2.0.dist-info/METADATA,sha256=U79cQVxYruPuRBCeQ8sZ9Xysf3sTnOJY8WtZt4qxL6k,2793
- huff-1.2.0.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
- huff-1.2.0.dist-info/top_level.txt,sha256=nlzX-PxZNFmIxANIJMySuIFPihd6qOBkRlhIC28NEsQ,5
- huff-1.2.0.dist-info/RECORD,,
+ huff-1.3.0.dist-info/METADATA,sha256=4qAsnzZ3lkPANRMjBrkPhj61HoBfC7cHkambNIZjSfI,3884
+ huff-1.3.0.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
+ huff-1.3.0.dist-info/top_level.txt,sha256=nlzX-PxZNFmIxANIJMySuIFPihd6qOBkRlhIC28NEsQ,5
+ huff-1.3.0.dist-info/RECORD,,