huff 1.1.0-py3-none-any.whl → 1.1.2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.
- huff/gistools.py +50 -3
- huff/models.py +200 -48
- huff/ors.py +21 -20
- huff/tests/tests_huff.py +8 -2
- huff-1.1.2.dist-info/METADATA +53 -0
- {huff-1.1.0.dist-info → huff-1.1.2.dist-info}/RECORD +8 -8
- huff-1.1.0.dist-info/METADATA +0 -26
- {huff-1.1.0.dist-info → huff-1.1.2.dist-info}/WHEEL +0 -0
- {huff-1.1.0.dist-info → huff-1.1.2.dist-info}/top_level.txt +0 -0
huff/gistools.py
CHANGED
@@ -4,13 +4,60 @@
 # Author: Thomas Wieland
 # ORCID: 0000-0001-5168-9846
 # mail: geowieland@googlemail.com
-# Version: 1.1.
-# Last update: 2025-
+# Version: 1.1.2
+# Last update: 2025-05-03 13:30
 # Copyright (c) 2025 Thomas Wieland
 #-----------------------------------------------------------------------
 
 
-import geopandas as gp
+import geopandas as gp
+from math import pi, sin, cos, acos
+
+
+def distance_matrix(
+    sources: list,
+    destinations: list,
+    unit: str = "m",
+    ):
+
+    def euclidean_distance (
+        source: list,
+        destination: list,
+        unit: str = "m"
+        ):
+
+        lon1 = source[0]
+        lat1 = source[1]
+        lon2 = destination[0]
+        lat2 = destination[1]
+
+        lat1_r = lat1*pi/180
+        lon1_r = lon1*pi/180
+        lat2_r = lat2*pi/180
+        lon2_r = lon2*pi/180
+
+        distance = 6378 * (acos(sin(lat1_r) * sin(lat2_r) + cos(lat1_r) * cos(lat2_r) * cos(lon2_r - lon1_r)))
+        if unit == "m":
+            distance = distance*1000
+        if unit == "mile":
+            distance = distance/1.60934
+
+        return distance
+
+    matrix = []
+
+    for source in sources:
+        row = []
+        for destination in destinations:
+            dist = euclidean_distance(
+                source,
+                destination,
+                unit
+                )
+            row.append(dist)
+        matrix.append(row)
+
+    return matrix
 
 
 def overlay_difference(
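The new `distance_matrix()` helper takes lists of (lon, lat) pairs and returns a nested list of distances; despite the inner function's name, the formula is a spherical ("as the crow flies") distance with an Earth radius of 6378 km. A minimal usage sketch, with made-up coordinates that are not from the package's test data:

```python
# Hedged sketch of the new gistools.distance_matrix() helper (huff >= 1.1.2).
# The coordinate pairs below are invented (lon, lat) values for illustration.
from huff.gistools import distance_matrix

origins = [[7.84, 47.99], [7.85, 48.00]]   # e.g. customer origins
stores = [[7.83, 47.98], [7.86, 48.01]]    # e.g. supply locations

# One row per origin, one column per destination; unit="m" returns metres.
matrix = distance_matrix(sources=origins, destinations=stores, unit="m")
print(matrix)
```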
huff/models.py
CHANGED
@@ -4,8 +4,8 @@
 # Author: Thomas Wieland
 # ORCID: 0000-0001-5168-9846
 # mail: geowieland@googlemail.com
-# Version: 1.1.
-# Last update: 2025-
+# Version: 1.1.2
+# Last update: 2025-05-03 13:29
 # Copyright (c) 2025 Thomas Wieland
 #-----------------------------------------------------------------------
 
@@ -13,7 +13,9 @@
 import pandas as pd
 import geopandas as gp
 import numpy as np
-
+import time
+from huff.ors import Client, TimeDistanceMatrix, Isochrone
+from huff.gistools import overlay_difference, distance_matrix
 
 
 class CustomerOrigins:
@@ -212,6 +214,11 @@ class SupplyLocations:
         new_destinations_gpd = new_destinations.get_geodata_gpd()
         new_destinations_metadata = new_destinations.get_metadata()
 
+        if list(new_destinations_gpd_original.columns) != list(geodata_gpd_original.columns):
+            raise KeyError("Supply locations and new destinations data have different column names.")
+        if list(new_destinations_gpd.columns) != list(geodata_gpd.columns):
+            raise KeyError("Supply locations and new destinations data have different column names.")
+
         geodata_gpd_original = geodata_gpd_original.append(
             new_destinations_gpd_original,
             ignore_index=True
@@ -229,6 +236,89 @@
         self.metadata = metadata
 
         return self
+
+    def isochrones(
+        self,
+        segments: list = [900, 600, 300],
+        range_type: str = "time",
+        intersections: str = "true",
+        profile: str = "driving-car",
+        donut: bool = True,
+        ors_server: str = "https://api.openrouteservice.org/v2/",
+        ors_auth: str = None,
+        timeout = 10,
+        delay = 1,
+        save_output: bool = True,
+        output_filepath: str = "isochrones.shp",
+        output_crs: str = "EPSG:4326"
+        ):
+
+        geodata_gpd = self.get_geodata_gpd()
+        metadata = self.get_metadata()
+
+        coords = [(point.x, point.y) for point in geodata_gpd.geometry]
+
+        unique_id_col = metadata["unique_id"]
+        unique_id_values = geodata_gpd[unique_id_col].values
+
+        ors_client = Client(
+            server = ors_server,
+            auth = ors_auth
+            )
+
+        isochrones_gdf = gp.GeoDataFrame(columns=[unique_id_col, "geometry"])
+
+        i = 0
+
+        for x, y in coords:
+
+            isochrone_output = ors_client.isochrone(
+                locations = [[x, y]],
+                segments = segments,
+                range_type = range_type,
+                intersections = intersections,
+                profile = profile,
+                timeout = timeout,
+                save_output = False,
+                output_crs = output_crs
+                )
+
+            if isochrone_output.status_code != 200:
+                continue
+
+            isochrone_gdf = isochrone_output.get_isochrones_gdf()
+
+            if donut:
+                isochrone_gdf = overlay_difference(
+                    polygon_gdf = isochrone_gdf,
+                    sort_col = "segment"
+                    )
+
+            time.sleep(delay)
+
+            isochrone_gdf[unique_id_col] = unique_id_values[i]
+
+            isochrones_gdf = pd.concat(
+                [
+                    isochrones_gdf,
+                    isochrone_gdf
+                ],
+                ignore_index=True
+                )
+
+            i = i+1
+
+        isochrones_gdf.set_crs(
+            output_crs,
+            allow_override=True,
+            inplace=True
+            )
+
+        if save_output:
+
+            isochrones_gdf.to_file(filename = output_filepath)
+
+        return isochrones_gdf
 
 class InteractionMatrix:
 
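The new `SupplyLocations.isochrones()` method requests one isochrone set per location from ORS, optionally cuts overlapping polygons into donut rings via `overlay_difference()`, and concatenates everything into a single GeoDataFrame. A hedged usage sketch (it assumes `supermarkets` is a `SupplyLocations` object such as the one built with `load_geodata()` in the test script; the key and file name are placeholders):

```python
# Hedged sketch of the new SupplyLocations.isochrones() method (huff >= 1.1.2).
isochrones_gdf = supermarkets.isochrones(
    segments=[900, 600, 300],       # one range per ring (seconds for range_type="time")
    donut=True,                     # cut nested isochrones into rings
    ors_auth="YOUR_ORS_API_KEY",    # placeholder key
    save_output=True,
    output_filepath="supermarkets_isochrones.shp",
)
```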
@@ -283,13 +373,19 @@ class InteractionMatrix:
 
     def transport_costs(
         self,
+        network: bool = True,
         range_type: str = "time",
         time_unit: str = "minutes",
+        distance_unit: str = "kilometers",
         ors_server: str = "https://api.openrouteservice.org/v2/",
         ors_auth: str = None,
         save_output: bool = False,
         output_filepath: str = "transport_costs_matrix.csv"
         ):
+
+        if not network and range_type == "time":
+            print ("Calculating euclidean distances (network = False). Setting range_type = 'distance'")
+            range_type = "distance"
 
         interaction_matrix_df = self.get_interaction_matrix_df()
 
@@ -311,53 +407,70 @@
 
         customer_origins_index = list(range(len(customer_origins_coords)))
         locations_coords_index = list(range(len(customer_origins_index), len(locations_coords)))
-
-        ors_client = Client(
-            server = ors_server,
-            auth = ors_auth
-            )
-        time_distance_matrix = ors_client.matrix(
-            locations = locations_coords,
-            save_output = save_output,
-            output_filepath = output_filepath,
-            sources = customer_origins_index,
-            destinations = locations_coords_index,
-            range_type = range_type
-            )
-
-        if time_distance_matrix.get_metadata() is None:
-            raise ValueError ("No transport costs matrix was built.")
 
-
-
-
+        if network:
+
+            ors_client = Client(
+                server = ors_server,
+                auth = ors_auth
+                )
+            time_distance_matrix = ors_client.matrix(
+                locations = locations_coords,
+                save_output = save_output,
+                output_filepath = output_filepath,
+                sources = customer_origins_index,
+                destinations = locations_coords_index,
+                range_type = range_type
+                )
+
+            if time_distance_matrix.get_metadata() is None:
+                raise ValueError ("No transport costs matrix was built.")
+
+            transport_costs_matrix = time_distance_matrix.get_matrix()
+            transport_costs_matrix_config = time_distance_matrix.get_config()
+            range_type = transport_costs_matrix_config["range_type"]
+
+            transport_costs_matrix["source"] = transport_costs_matrix["source"].astype(int)
+            transport_costs_matrix["source"] = transport_costs_matrix["source"].map(
+                dict(enumerate(customer_origins_ids))
+                )
+
+            transport_costs_matrix["destination"] = transport_costs_matrix["destination"].astype(int)
+            transport_costs_matrix["destination"] = transport_costs_matrix["destination"].map(
+                dict(enumerate(supply_locations_ids))
+                )
+
+            transport_costs_matrix["source_destination"] = transport_costs_matrix["source"].astype(str)+"_"+transport_costs_matrix["destination"].astype(str)
+            transport_costs_matrix = transport_costs_matrix[["source_destination", range_type]]
+
+            interaction_matrix_df = interaction_matrix_df.merge(
+                transport_costs_matrix,
+                left_on="ij",
+                right_on="source_destination"
+                )
+
+            interaction_matrix_df["t_ij"] = interaction_matrix_df[range_type]
+            if time_unit == "minutes":
+                interaction_matrix_df["t_ij"] = interaction_matrix_df["t_ij"]/60
+            if time_unit == "hours":
+                interaction_matrix_df["t_ij"] = interaction_matrix_df["t_ij"]/60/60
+
+            interaction_matrix_df = interaction_matrix_df.drop(columns=["source_destination", range_type])
 
-
-        transport_costs_matrix["source"] = transport_costs_matrix["source"].map(
-            dict(enumerate(customer_origins_ids))
-            )
-
-        transport_costs_matrix["destination"] = transport_costs_matrix["destination"].astype(int)
-        transport_costs_matrix["destination"] = transport_costs_matrix["destination"].map(
-            dict(enumerate(supply_locations_ids))
-            )
-
-        transport_costs_matrix["source_destination"] = transport_costs_matrix["source"].astype(str)+"_"+transport_costs_matrix["destination"].astype(str)
-        transport_costs_matrix = transport_costs_matrix[["source_destination", range_type]]
+        else:
 
-
-
-
-
-
-
-        if time_unit == "minutes":
-            interaction_matrix_df["t_ij"] = interaction_matrix_df["t_ij"]/60
-        if time_unit == "hours":
-            interaction_matrix_df["t_ij"] = interaction_matrix_df["t_ij"]/60/60
+            distance_matrix_result = distance_matrix(
+                sources = customer_origins_coords,
+                destinations = supply_locations_coords,
+                unit = "m"
+                )
+
+            distance_matrix_result_flat = [distance for sublist in distance_matrix_result for distance in sublist]
 
-
+            interaction_matrix_df["t_ij"] = distance_matrix_result_flat
+
+            if distance_unit == "kilometers":
+                interaction_matrix_df["t_ij"] = interaction_matrix_df["t_ij"]/1000
 
         self.interaction_matrix_df = interaction_matrix_df
 
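`InteractionMatrix.transport_costs()` now branches on the new `network` flag: `network=True` keeps the ORS matrix workflow, while `network=False` fills `t_ij` from the straight-line `distance_matrix()` in gistools.py and forces `range_type` to "distance". A hedged sketch of the fallback path (it assumes `interaction_matrix` was produced by `create_interaction_matrix()`):

```python
# Hedged sketch of the euclidean fallback added in 1.1.2; no ORS key needed.
interaction_matrix.transport_costs(
    network=False,                  # skip ORS, use straight-line distances
    distance_unit="kilometers",     # t_ij is divided by 1000 in this branch
)
```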
@@ -372,6 +485,11 @@ class InteractionMatrix:
         if interaction_matrix_df["A_j"].isna().all():
             raise ValueError ("Attraction variable is not defined")
 
+        check_vars(
+            df = interaction_matrix_df,
+            cols = ["A_j", "t_ij"]
+            )
+
         customer_origins = self.customer_origins
         customer_origins_metadata = customer_origins.get_metadata()
         tc_weighting = customer_origins_metadata["weighting"][0]
@@ -433,6 +551,11 @@ class InteractionMatrix:
         if interaction_matrix_df["C_i"].isna().all():
             raise ValueError ("Market size column in customer origins not defined. Use CustomerOrigins.define_marketsize()")
 
+        check_vars(
+            df = interaction_matrix_df,
+            cols = ["C_i"]
+            )
+
         if interaction_matrix_df["p_ij"].isna().all():
             self.probabilities()
             interaction_matrix_df = self.interaction_matrix_df
@@ -446,7 +569,12 @@ class InteractionMatrix:
     def marketareas (self):
 
         interaction_matrix_df = self.interaction_matrix_df
-
+
+        check_vars(
+            df = interaction_matrix_df,
+            cols = ["E_ij"]
+            )
+
         market_areas_df = pd.DataFrame(interaction_matrix_df.groupby("j")["E_ij"].sum())
         market_areas_df = market_areas_df.reset_index(drop=False)
         market_areas_df = market_areas_df.rename(columns={"E_ij": "T_j"})
@@ -460,8 +588,10 @@ class InteractionMatrix:
 
     def mci_transformation(
         self,
-        cols = ["A_j", "t_ij"]
+        cols: list = ["A_j", "t_ij"]
         ):
+
+        """ MCI model log-centering transformation """
 
         cols = cols + ["p_ij"]
 
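The new docstring marks `mci_transformation()` as the MCI log-centering transformation (Nakanishi & Cooper 1974): each variable is divided by its geometric mean per origin and then logged, which is what the `x_geom = np.exp(np.log(x).mean())` line further below computes. A hedged stand-alone sketch of that step (helper name and data are illustrative, not the package's API):

```python
import numpy as np
import pandas as pd

def log_centering(x: pd.Series) -> pd.Series:
    # Geometric mean via exp(mean(log(x))), as in the diff, then the log-ratio.
    x_geom = np.exp(np.log(x).mean())
    return np.log(x / x_geom)

# Example: shares p_ij of one origin i across three destinations j
p_ij = pd.Series([0.5, 0.3, 0.2])
print(log_centering(p_ij))
```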
@@ -700,12 +830,34 @@ def create_interaction_matrix(
 
     return interaction_matrix
 
+def check_vars(
+    df: pd.DataFrame,
+    cols: list
+    ):
+
+    for col in cols:
+        if col not in df.columns:
+            raise KeyError(f"Column '{col}' not in dataframe.")
+
+    for col in cols:
+        if not pd.api.types.is_numeric_dtype(df[col]):
+            raise ValueError(f"Column '{col}' is not numeric. All columns must be numeric.")
+
+    for col in cols:
+        if (df[col] <= 0).any():
+            raise ValueError(f"Column '{col}' includes values <= 0. All values must be numeric and positive.")
+
 def mci_transformation(
     df: pd.DataFrame,
     ref_col: str,
     cols: list
     ):
 
+    check_vars(
+        df = df,
+        cols = cols + [ref_col]
+        )
+
     def lct (x):
 
         x_geom = np.exp(np.log(x).mean())
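The new module-level `check_vars()` helper is now called before the probability, market-size, market-area, and MCI steps and rejects missing, non-numeric, or non-positive columns early. A small sketch of the expected behaviour (the example DataFrame is invented):

```python
# Hedged example of the new check_vars() validation added in 1.1.2.
import pandas as pd
from huff.models import check_vars

df = pd.DataFrame({"A_j": [1000.0, 2500.0], "t_ij": [5.0, 0.0]})

try:
    check_vars(df=df, cols=["A_j", "t_ij"])
except ValueError as err:
    # t_ij contains a zero, so check_vars raises:
    # "Column 't_ij' includes values <= 0. All values must be numeric and positive."
    print(err)
```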
huff/ors.py
CHANGED
@@ -4,8 +4,8 @@
 # Author: Thomas Wieland
 # ORCID: 0000-0001-5168-9846
 # mail: geowieland@googlemail.com
-# Version: 1.1.
-# Last update: 2025-
+# Version: 1.1.2
+# Last update: 2025-05-03 13:33
 # Copyright (c) 2025 Thomas Wieland
 #-----------------------------------------------------------------------
 
@@ -32,6 +32,11 @@ class Isochrone:
         self.status_code = status_code
         self.save_config = save_config
 
+    def get_isochrones_gdf(self):
+
+        isochrones_gdf = self.isochrones_gdf
+        return isochrones_gdf
+
     def summary(self):
 
         metadata = self.metadata
@@ -119,10 +124,10 @@ class Client:
         range_type: str = "time",
         intersections: str = "true",
         profile: str = "driving-car",
+        timeout = 10,
         save_output: bool = True,
         output_filepath: str = "isochrones.shp",
-        output_crs: str = "EPSG:
-        verbose: bool = True
+        output_crs: str = "EPSG:4326"
         ):
 
         ors_url = self.server + "isochrones/" + profile
@@ -153,13 +158,13 @@ class Client:
             response = requests.post(
                 ors_url,
                 headers=headers,
-                json=body
+                json=body,
+                timeout=timeout
                 )
 
         except:
 
-
-            print ("Unknown error while accessing ORS server")
+            print ("Unknown error while accessing ORS server")
 
             status_code = 99999
             isochrones_gdf = None
@@ -178,8 +183,7 @@ class Client:
 
         if status_code == 200:
 
-
-            print ("Accessing ORS server successful")
+            print ("Accessing ORS server successful")
 
             response_json = response.json()
 
@@ -209,8 +213,7 @@ class Client:
 
         else:
 
-
-            print ("Error while accessing ORS server. Status Code: " + str(status_code))
+            print ("Error while accessing ORS server. Status Code: " + str(status_code))
 
             isochrones_gdf = None
             metadata = None
@@ -235,12 +238,12 @@ class Client:
         metrics: list = [],
         resolve_locations: bool = False,
         units: str = "mi",
+        timeout = 10,
         save_output = False,
         output_filepath = "matrix.csv",
         csv_sep = ";",
         csv_decimal = ",",
-        csv_encoding = None
-        verbose = True
+        csv_encoding = None
         ):
 
         ors_url = self.server + "matrix/" + profile
@@ -278,13 +281,13 @@ class Client:
             response = requests.post(
                 ors_url,
                 headers=headers,
-                json=body
+                json=body,
+                timeout=timeout
                 )
 
         except:
 
-
-            print ("Unknown error while accessing ORS server")
+            print ("Unknown error while accessing ORS server")
 
             status_code = 99999
             matrix_df = None
@@ -303,8 +306,7 @@ class Client:
 
         if status_code == 200:
 
-
-            print ("Accessing ORS server successful")
+            print ("Accessing ORS server successful")
 
             response_json = response.json()
 
@@ -366,8 +368,7 @@ class Client:
 
         else:
 
-
-            print ("Error in accessing ORS server. Status Code: " + str(status_code))
+            print ("Error in accessing ORS server. Status Code: " + str(status_code))
 
             matrix_df = None
             metadata = None
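Both `Client.isochrone()` and `Client.matrix()` now take a `timeout` argument that is passed through to `requests.post()`, and the former `verbose` parameter is gone. A hedged sketch of a direct client call (API key and coordinates are placeholders):

```python
# Hedged sketch of the ORS client with the new timeout parameter (huff >= 1.1.2).
from huff.ors import Client

client = Client(
    server="https://api.openrouteservice.org/v2/",
    auth="YOUR_ORS_API_KEY",        # placeholder, not a real key
)

iso = client.isochrone(
    locations=[[7.84, 47.99]],      # made-up (lon, lat) point
    timeout=10,                     # seconds before requests.post() gives up
    save_output=False,
)
print(iso.status_code)              # 200 on success, 99999 if the request failed
```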
huff/tests/tests_huff.py
CHANGED
@@ -4,8 +4,8 @@
 # Author: Thomas Wieland
 # ORCID: 0000-0001-5168-9846
 # mail: geowieland@googlemail.com
-# Version: 1.1.
-# Last update: 2025-
+# Version: 1.1.2
+# Last update: 2025-05-03 13:32
 # Copyright (c) 2025 Thomas Wieland
 #-----------------------------------------------------------------------
 
@@ -69,6 +69,12 @@ Haslach_supermarkets = load_geodata(
 
 Haslach_supermarkets.summary()
 
+Haslach_supermarkets.isochrones(
+    save_output=True,
+    ors_auth = "5b3ce3597851110001cf62480a15aafdb5a64f4d91805929f8af6abd",
+    output_filepath="Haslach_supermarkets_iso.shp"
+    )
+
Haslach_supermarkets.define_attraction("VKF_qm")
 
 Haslach_supermarkets.define_attraction_weighting(
huff-1.1.2.dist-info/METADATA
ADDED
@@ -0,0 +1,53 @@
+Metadata-Version: 2.1
+Name: huff
+Version: 1.1.2
+Summary: huff: Huff Model Market Area Analysis
+Author: Thomas Wieland
+Author-email: geowieland@googlemail.com
+Description-Content-Type: text/markdown
+Requires-Dist: geopandas
+Requires-Dist: pandas
+Requires-Dist: numpy
+
+# huff: Huff Model Market Area Analysis
+
+## Author
+
+Thomas Wieland [ORCID](https://orcid.org/0000-0001-5168-9846) [EMail](mailto:geowieland@googlemail.com)
+
+See the /tests directory for usage examples of most of the included functions.
+
+
+## Features
+
+- **Huff Model**:
+  - Defining origins and destinations with weightings
+  - Creating interaction matrix from origins and destinations
+  - Calculating basic Huff Model
+- **Multiplicative Competitive Interaction Model**:
+  - Log-centering transformation
+- **OpenRouteService Client** (Tools via API):
+  - Creating transport costs matrix from origins and destinations
+  - Creating isochrones from destinations
+
+Attribution of OpenRouteService:
+© openrouteservice.org by HeiGIT | Map data © OpenStreetMap contributors
+
+Visit https://openrouteservice.org/
+
+## Literature
+- Huff DL (1962) *Determination of Intra-Urban Retail Trade Areas*.
+- Huff DL (1964) Defining and estimating a trading area. *Journal of Marketing* 28(4): 34–38. [10.2307/1249154](https://doi.org/10.2307/1249154)
+- Huff DL, McCallum BM (2008) Calibrating the Huff Model using ArcGIS Business Analyst. ESRI White Paper, September 2008. https://www.esri.com/library/whitepapers/pdfs/calibrating-huff-model.pdf
+- De Beule M, Van den Poel D, Van de Weghe N (2014) An extended Huff-model for robustly benchmarking and predicting retail network performance. *Applied Geography* 46(1): 80–89. [10.1016/j.apgeog.2013.09.026](https://doi.org/10.1016/j.apgeog.2013.09.026)
+- Nakanishi M, Cooper LG (1974) Parameter estimation for a Multiplicative Competitive Interaction Model: Least squares approach. *Journal of Marketing Research* 11(3): 303–311. [10.2307/3151146](https://doi.org/10.2307/3151146)
+- Wieland T (2017) Market Area Analysis for Retail and Service Locations with MCI. *R Journal* 9(1): 298–323. [10.32614/RJ-2017-020](https://doi.org/10.32614/RJ-2017-020)
+- Wieland T (2018) A Hurdle Model Approach of Store Choice and Market Area Analysis in Grocery Retailing. *Papers in Applied Geography* 4(4): 370–389. [10.1080/23754931.2018.1519458](https://doi.org/10.1080/23754931.2018.1519458)
+
+
+## Installation
+
+To install the package, use `pip`:
+
+```bash
+pip install huff
{huff-1.1.0.dist-info → huff-1.1.2.dist-info}/RECORD
CHANGED
@@ -1,9 +1,9 @@
 huff/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-huff/gistools.py,sha256=
-huff/models.py,sha256=
-huff/ors.py,sha256=
+huff/gistools.py,sha256=vuEpNC-IEIrNtzptdjzyvOP05qFbJYfeHpPZfo_OMvs,2721
+huff/models.py,sha256=yKty9d8nG05HSjNIAPiGNaDeEHLGySZhpNZJHnmPYdU,31300
+huff/ors.py,sha256=dkuVj14Jr69D2xp8NSi2QDkXNNLrudUs9f-i_UtKOdQ,11467
 huff/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-huff/tests/tests_huff.py,sha256=
+huff/tests/tests_huff.py,sha256=4AbQcD46iG7IsFioigVwRwOyeHMniYELIspMh-rCEHk,2515
 huff/tests/data/Haslach.cpg,sha256=OtMDH1UDpEBK-CUmLugjLMBNTqZoPULF3QovKiesmCQ,5
 huff/tests/data/Haslach.dbf,sha256=GVPIt05OzDO7UrRDcsMhiYWvyXAPg6Z-qkiysFzj-fc,506
 huff/tests/data/Haslach.prj,sha256=2Jy1Vlzh7UxQ1MXpZ9UYLs2SxfrObj2xkEkZyLqmGTY,437
@@ -16,7 +16,7 @@ huff/tests/data/Haslach_supermarkets.prj,sha256=2Jy1Vlzh7UxQ1MXpZ9UYLs2SxfrObj2x
 huff/tests/data/Haslach_supermarkets.qmd,sha256=j9i4_Pz7ZMSG2UDSb3nuhJpw0KWXIRhiiDymqJP6_Fo,2479
 huff/tests/data/Haslach_supermarkets.shp,sha256=X7QbQ0BTMag_B-bDRbpr-go2BQIXo3Y8zMAKpYZmlps,324
 huff/tests/data/Haslach_supermarkets.shx,sha256=j23QHX-SmdAeN04rw0x8nUOran-OCg_T6r_LvzzEPWs,164
-huff-1.1.
-huff-1.1.
-huff-1.1.
-huff-1.1.
+huff-1.1.2.dist-info/METADATA,sha256=xTrAKBys0WpSPbu7ojxZ-gSTs5l3zQ0uKlnmiK3gvfA,2541
+huff-1.1.2.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
+huff-1.1.2.dist-info/top_level.txt,sha256=nlzX-PxZNFmIxANIJMySuIFPihd6qOBkRlhIC28NEsQ,5
+huff-1.1.2.dist-info/RECORD,,
huff-1.1.0.dist-info/METADATA
DELETED
@@ -1,26 +0,0 @@
|
|
1
|
-
Metadata-Version: 2.1
|
2
|
-
Name: huff
|
3
|
-
Version: 1.1.0
|
4
|
-
Summary: huff: Huff Model Market Area Analysis
|
5
|
-
Author: Thomas Wieland
|
6
|
-
Author-email: geowieland@googlemail.com
|
7
|
-
Description-Content-Type: text/markdown
|
8
|
-
Requires-Dist: geopandas
|
9
|
-
Requires-Dist: pandas
|
10
|
-
Requires-Dist: numpy
|
11
|
-
|
12
|
-
# huff: Huff Model Market Area Analysis
|
13
|
-
|
14
|
-
## Author
|
15
|
-
|
16
|
-
Thomas Wieland [ORCID](https://orcid.org/0000-0001-5168-9846) [EMail](mailto:geowieland@googlemail.com)
|
17
|
-
|
18
|
-
See the /tests directory for usage examples of most of the included functions.
|
19
|
-
|
20
|
-
|
21
|
-
## Installation
|
22
|
-
|
23
|
-
To install the package, use `pip`:
|
24
|
-
|
25
|
-
```bash
|
26
|
-
pip install huff
|
{huff-1.1.0.dist-info → huff-1.1.2.dist-info}/WHEEL
File without changes

{huff-1.1.0.dist-info → huff-1.1.2.dist-info}/top_level.txt
File without changes