huff 1.2.0__tar.gz → 1.3.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- huff-1.3.1/PKG-INFO +68 -0
- huff-1.3.1/README.md +60 -0
- huff-1.3.1/huff/gistools.py +216 -0
- {huff-1.2.0 → huff-1.3.1}/huff/models.py +134 -30
- {huff-1.2.0 → huff-1.3.1}/huff/ors.py +23 -18
- huff-1.3.1/huff/osm.py +250 -0
- {huff-1.2.0 → huff-1.3.1}/huff/tests/data/Haslach.qmd +43 -43
- {huff-1.2.0 → huff-1.3.1}/huff/tests/data/Haslach_supermarkets.qmd +43 -43
- huff-1.3.1/huff/tests/tests_huff.py +260 -0
- huff-1.3.1/huff.egg-info/PKG-INFO +68 -0
- {huff-1.2.0 → huff-1.3.1}/huff.egg-info/SOURCES.txt +1 -0
- huff-1.3.1/huff.egg-info/requires.txt +10 -0
- {huff-1.2.0 → huff-1.3.1}/setup.py +6 -3
- huff-1.2.0/PKG-INFO +0 -52
- huff-1.2.0/README.md +0 -44
- huff-1.2.0/huff/gistools.py +0 -96
- huff-1.2.0/huff/tests/tests_huff.py +0 -153
- huff-1.2.0/huff.egg-info/PKG-INFO +0 -52
- huff-1.2.0/huff.egg-info/requires.txt +0 -7
- {huff-1.2.0 → huff-1.3.1}/MANIFEST.in +0 -0
- {huff-1.2.0 → huff-1.3.1}/huff/__init__.py +0 -0
- {huff-1.2.0 → huff-1.3.1}/huff/tests/__init__.py +0 -0
- {huff-1.2.0 → huff-1.3.1}/huff/tests/data/Haslach.cpg +0 -0
- {huff-1.2.0 → huff-1.3.1}/huff/tests/data/Haslach.dbf +0 -0
- {huff-1.2.0 → huff-1.3.1}/huff/tests/data/Haslach.prj +0 -0
- {huff-1.2.0 → huff-1.3.1}/huff/tests/data/Haslach.shp +0 -0
- {huff-1.2.0 → huff-1.3.1}/huff/tests/data/Haslach.shx +0 -0
- {huff-1.2.0 → huff-1.3.1}/huff/tests/data/Haslach_supermarkets.cpg +0 -0
- {huff-1.2.0 → huff-1.3.1}/huff/tests/data/Haslach_supermarkets.dbf +0 -0
- {huff-1.2.0 → huff-1.3.1}/huff/tests/data/Haslach_supermarkets.prj +0 -0
- {huff-1.2.0 → huff-1.3.1}/huff/tests/data/Haslach_supermarkets.shp +0 -0
- {huff-1.2.0 → huff-1.3.1}/huff/tests/data/Haslach_supermarkets.shx +0 -0
- {huff-1.2.0 → huff-1.3.1}/huff/tests/data/Wieland2015.xlsx +0 -0
- {huff-1.2.0 → huff-1.3.1}/huff.egg-info/dependency_links.txt +0 -0
- {huff-1.2.0 → huff-1.3.1}/huff.egg-info/top_level.txt +0 -0
- {huff-1.2.0 → huff-1.3.1}/setup.cfg +0 -0
huff-1.3.1/PKG-INFO
ADDED
@@ -0,0 +1,68 @@
+Metadata-Version: 2.1
+Name: huff
+Version: 1.3.1
+Summary: huff: Huff Model Market Area Analysis
+Author: Thomas Wieland
+Author-email: geowieland@googlemail.com
+Description-Content-Type: text/markdown
+
+# huff: Huff Model Market Area Analysis
+
+This Python library is designed for performing market area analyses with the Huff Model (Huff 1962, 1964) and/or the Multiplicative Competitive Interaction (MCI) Model (Nakanishi and Cooper 1974, 1982). Users may load point shapefiles (or CSV, XLSX) of customer origins and supply locations and conduct a market area analysis step by step. The package also includes supplementary GIS functions, including clients for OpenRouteService(1) for network analysis (e.g., transport cost matrix) and OpenStreetMap(2) for simple maps. See Huff and McCallum (2008) or Wieland (2017) for a description of the models and their practical application.
+
+
+## Author
+
+Thomas Wieland [ORCID](https://orcid.org/0000-0001-5168-9846) [EMail](mailto:geowieland@googlemail.com)
+
+See the /tests directory for usage examples of most of the included functions.
+
+
+## Features
+
+- **Huff Model**:
+  - Defining origins and destinations with weightings
+  - Creating interaction matrix from origins and destinations
+  - Market simulation with basic Huff Model
+- **Multiplicative Competitive Interaction Model**:
+  - Log-centering transformation of interaction matrix
+  - Fitting MCI model with >= 2 independent variables
+  - MCI model market simulation
+- **GIS tools**:
+  - OpenRouteService(1) Client:
+    - Creating transport cost matrix from origins and destinations
+    - Creating isochrones from origins and destinations
+  - OpenStreetMap(2) Client:
+    - Creating simple maps with OSM basemap
+  - Other GIS tools:
+    - Creating buffers from geodata
+    - Spatial join with statistics
+    - Creating Euclidean distance matrix from origins and destinations
+    - Overlay-difference analysis of polygons
+- **Data management tools**:
+  - Loading own interaction matrix for analysis
+  - Creating origins/destinations objects from point geodata
+
+(1) © openrouteservice.org by HeiGIT | Map data © OpenStreetMap contributors | https://openrouteservice.org/
+
+(2) © OpenStreetMap contributors | available under the Open Database License | https://www.openstreetmap.org/
+
+
+## Literature
+- Huff DL (1962) *Determination of Intra-Urban Retail Trade Areas*.
+- Huff DL (1964) Defining and estimating a trading area. *Journal of Marketing* 28(4): 34–38. [10.2307/1249154](https://doi.org/10.2307/1249154)
+- Huff DL, McCallum BM (2008) Calibrating the Huff Model using ArcGIS Business Analyst. ESRI White Paper, September 2008. https://www.esri.com/library/whitepapers/pdfs/calibrating-huff-model.pdf
+- De Beule M, Van den Poel D, Van de Weghe N (2014) An extended Huff-model for robustly benchmarking and predicting retail network performance. *Applied Geography* 46(1): 80–89. [10.1016/j.apgeog.2013.09.026](https://doi.org/10.1016/j.apgeog.2013.09.026)
+- Nakanishi M, Cooper LG (1974) Parameter estimation for a Multiplicative Competitive Interaction Model: Least squares approach. *Journal of Marketing Research* 11(3): 303–311. [10.2307/3151146](https://doi.org/10.2307/3151146)
+- Nakanishi M, Cooper LG (1982) Technical Note — Simplified Estimation Procedures for MCI Models. *Marketing Science* 1(3): 314–322. [10.1287/mksc.1.3.314](https://doi.org/10.1287/mksc.1.3.314)
+- Wieland T (2017) Market Area Analysis for Retail and Service Locations with MCI. *R Journal* 9(1): 298–323. [10.32614/RJ-2017-020](https://doi.org/10.32614/RJ-2017-020)
+- Wieland T (2018) A Hurdle Model Approach of Store Choice and Market Area Analysis in Grocery Retailing. *Papers in Applied Geography* 4(4): 370–389. [10.1080/23754931.2018.1519458](https://doi.org/10.1080/23754931.2018.1519458)
+- Wieland T (2023) Spatial shopping behavior during the Corona pandemic: insights from a micro-econometric store choice model for consumer electronics and furniture retailing in Germany. *Journal of Geographical Systems* 25(2): 291–326. [10.1007/s10109-023-00408-x](https://doi.org/10.1007/s10109-023-00408-x)
+
+
+## Installation
+
+To install the package, use `pip`:
+
+```bash
+pip install huff
huff-1.3.1/README.md
ADDED
@@ -0,0 +1,60 @@
+# huff: Huff Model Market Area Analysis
+
+This Python library is designed for performing market area analyses with the Huff Model (Huff 1962, 1964) and/or the Multiplicative Competitive Interaction (MCI) Model (Nakanishi and Cooper 1974, 1982). Users may load point shapefiles (or CSV, XLSX) of customer origins and supply locations and conduct a market area analysis step by step. The package also includes supplementary GIS functions, including clients for OpenRouteService(1) for network analysis (e.g., transport cost matrix) and OpenStreetMap(2) for simple maps. See Huff and McCallum (2008) or Wieland (2017) for a description of the models and their practical application.
+
+
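For orientation, the basic Huff Model referenced here can be summarized in standard textbook notation (not necessarily the package's internal variable names): the probability that customers at origin $i$ patronize location $j$ rises with the location's attraction $A_j$ and falls with the travel cost $d_{ij}$,

```math
P_{ij} = \frac{A_j^{\gamma}\, d_{ij}^{-\lambda}}{\sum_{k=1}^{n} A_k^{\gamma}\, d_{ik}^{-\lambda}}
```

where $\gamma$ and $\lambda$ are empirically estimated weighting parameters. The MCI Model generalizes this to two or more attraction variables and estimates the weights by OLS after a log-centering transformation (Nakanishi and Cooper 1974).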
+## Author
+
+Thomas Wieland [ORCID](https://orcid.org/0000-0001-5168-9846) [EMail](mailto:geowieland@googlemail.com)
+
+See the /tests directory for usage examples of most of the included functions.
+
+
+## Features
+
+- **Huff Model**:
+  - Defining origins and destinations with weightings
+  - Creating interaction matrix from origins and destinations
+  - Market simulation with basic Huff Model
+- **Multiplicative Competitive Interaction Model**:
+  - Log-centering transformation of interaction matrix
+  - Fitting MCI model with >= 2 independent variables
+  - MCI model market simulation
+- **GIS tools**:
+  - OpenRouteService(1) Client:
+    - Creating transport cost matrix from origins and destinations
+    - Creating isochrones from origins and destinations
+  - OpenStreetMap(2) Client:
+    - Creating simple maps with OSM basemap
+  - Other GIS tools:
+    - Creating buffers from geodata
+    - Spatial join with statistics
+    - Creating Euclidean distance matrix from origins and destinations
+    - Overlay-difference analysis of polygons
+- **Data management tools**:
+  - Loading own interaction matrix for analysis
+  - Creating origins/destinations objects from point geodata
+
+(1) © openrouteservice.org by HeiGIT | Map data © OpenStreetMap contributors | https://openrouteservice.org/
+
+(2) © OpenStreetMap contributors | available under the Open Database License | https://www.openstreetmap.org/
+
+
+## Literature
+- Huff DL (1962) *Determination of Intra-Urban Retail Trade Areas*.
+- Huff DL (1964) Defining and estimating a trading area. *Journal of Marketing* 28(4): 34–38. [10.2307/1249154](https://doi.org/10.2307/1249154)
+- Huff DL, McCallum BM (2008) Calibrating the Huff Model using ArcGIS Business Analyst. ESRI White Paper, September 2008. https://www.esri.com/library/whitepapers/pdfs/calibrating-huff-model.pdf
+- De Beule M, Van den Poel D, Van de Weghe N (2014) An extended Huff-model for robustly benchmarking and predicting retail network performance. *Applied Geography* 46(1): 80–89. [10.1016/j.apgeog.2013.09.026](https://doi.org/10.1016/j.apgeog.2013.09.026)
+- Nakanishi M, Cooper LG (1974) Parameter estimation for a Multiplicative Competitive Interaction Model: Least squares approach. *Journal of Marketing Research* 11(3): 303–311. [10.2307/3151146](https://doi.org/10.2307/3151146)
+- Nakanishi M, Cooper LG (1982) Technical Note — Simplified Estimation Procedures for MCI Models. *Marketing Science* 1(3): 314–322. [10.1287/mksc.1.3.314](https://doi.org/10.1287/mksc.1.3.314)
+- Wieland T (2017) Market Area Analysis for Retail and Service Locations with MCI. *R Journal* 9(1): 298–323. [10.32614/RJ-2017-020](https://doi.org/10.32614/RJ-2017-020)
+- Wieland T (2018) A Hurdle Model Approach of Store Choice and Market Area Analysis in Grocery Retailing. *Papers in Applied Geography* 4(4): 370–389. [10.1080/23754931.2018.1519458](https://doi.org/10.1080/23754931.2018.1519458)
+- Wieland T (2023) Spatial shopping behavior during the Corona pandemic: insights from a micro-econometric store choice model for consumer electronics and furniture retailing in Germany. *Journal of Geographical Systems* 25(2): 291–326. [10.1007/s10109-023-00408-x](https://doi.org/10.1007/s10109-023-00408-x)
+
+
+## Installation
+
+To install the package, use `pip`:
+
+```bash
+pip install huff
huff-1.3.1/huff/gistools.py
ADDED
@@ -0,0 +1,216 @@
+#-----------------------------------------------------------------------
+# Name:        gistools (huff package)
+# Purpose:     GIS tools
+# Author:      Thomas Wieland
+# ORCID:       0000-0001-5168-9846
+# mail:        geowieland@googlemail.com
+# Version:     1.3.1
+# Last update: 2025-05-28 18:01
+# Copyright (c) 2025 Thomas Wieland
+#-----------------------------------------------------------------------
+
+
+import geopandas as gp
+import pandas as pd
+from pandas.api.types import is_numeric_dtype
+from math import pi, sin, cos, acos
+
+
+def distance_matrix(
+    sources: list,
+    destinations: list,
+    unit: str = "m",
+    ):
+
+    def euclidean_distance(
+        source: list,
+        destination: list,
+        unit: str = "m"
+        ):
+
+        lon1 = source[0]
+        lat1 = source[1]
+        lon2 = destination[0]
+        lat2 = destination[1]
+
+        lat1_r = lat1*pi/180
+        lon1_r = lon1*pi/180
+        lat2_r = lat2*pi/180
+        lon2_r = lon2*pi/180
+
+        distance = 6378 * (acos(sin(lat1_r) * sin(lat2_r) + cos(lat1_r) * cos(lat2_r) * cos(lon2_r - lon1_r)))
+        if unit == "m":
+            distance = distance*1000
+        if unit == "mile":
+            distance = distance/1.60934
+
+        return distance
+
+    matrix = []
+
+    for source in sources:
+        row = []
+        for destination in destinations:
+            dist = euclidean_distance(
+                source,
+                destination,
+                unit
+                )
+            row.append(dist)
+        matrix.append(row)
+
+    return matrix
+
+
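Note that the nested `euclidean_distance` helper is, despite its name, a great-circle distance: it applies the spherical law of cosines on a sphere of radius 6378 km, so inputs must be `[lon, lat]` pairs in decimal degrees. A minimal usage sketch (the coordinates are made up):

```python
from huff.gistools import distance_matrix

# Hypothetical [lon, lat] pairs in decimal degrees (WGS84)
origins = [[7.84, 48.00], [7.85, 48.01]]
stores = [[7.83, 47.99], [7.86, 48.02]]

# One row per origin, one column per destination.
# unit="m" (default) returns meters, "mile" returns miles;
# any other value falls through to kilometers.
matrix = distance_matrix(
    sources=origins,
    destinations=stores,
    unit="m"
    )
print(matrix)  # nested list, e.g. [[d11, d12], [d21, d22]]
```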
+def buffers(
+    point_gdf: gp.GeoDataFrame,
+    unique_id_col: str,
+    distances: list,
+    donut: bool = True,
+    save_output: bool = True,
+    output_filepath: str = "buffers.shp",
+    output_crs: str = "EPSG:4326"
+    ):
+
+    all_buffers_gdf = gp.GeoDataFrame(columns=[unique_id_col, "segment", "geometry"])
+
+    for idx, row in point_gdf.iterrows():
+
+        point_buffers = []
+
+        for distance in distances:
+
+            point = row["geometry"]
+            point_buffer = point.buffer(distance)
+
+            point_buffer_gdf = gp.GeoDataFrame(
+                {
+                    unique_id_col: row[unique_id_col],
+                    "geometry": [point_buffer],
+                    "segment": [distance]
+                },
+                crs=point_gdf.crs
+                )
+
+            point_buffers.append(point_buffer_gdf)
+
+        point_buffers_gdf = pd.concat(
+            point_buffers,
+            ignore_index=True
+            )
+
+        if donut:
+            point_buffers_gdf = overlay_difference(
+                polygon_gdf=point_buffers_gdf,
+                sort_col="segment"
+                )
+
+        all_buffers_gdf = pd.concat(
+            [
+                all_buffers_gdf,
+                point_buffers_gdf
+            ],
+            ignore_index=True)
+
+    all_buffers_gdf = all_buffers_gdf.to_crs(output_crs)
+
+    if save_output:
+        all_buffers_gdf.to_file(output_filepath)
+        print("Saved as", output_filepath)
+
+    return all_buffers_gdf
+
+
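Since `buffers()` relies on Shapely's planar `buffer()`, the `distances` are interpreted in the units of `point_gdf`'s CRS. A usage sketch, assuming a projected CRS in meters; the file name and ID column are hypothetical:

```python
import geopandas as gp
from huff.gistools import buffers

# Hypothetical point layer, reprojected so buffer distances are in meters
stores_gdf = gp.read_file("stores.shp").to_crs("EPSG:25832")

rings_gdf = buffers(
    point_gdf=stores_gdf,
    unique_id_col="store_id",      # hypothetical unique ID column
    distances=[500, 1000, 2000],   # radii in CRS units (here: meters)
    donut=True,                    # carve concentric rings via overlay_difference()
    save_output=False              # skip writing buffers.shp
    )
# Result: one ring ("donut") per point and distance segment,
# reprojected to the default output_crs EPSG:4326
```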
+def overlay_difference(
+    polygon_gdf: gp.GeoDataFrame,
+    sort_col: str = None,
+    ):
+
+    if sort_col is not None:
+        polygon_gdf = polygon_gdf.sort_values(by=sort_col).reset_index(drop=True)
+    else:
+        polygon_gdf = polygon_gdf.reset_index(drop=True)
+
+    new_geometries = []
+    new_data = []
+
+    for i in range(len(polygon_gdf) - 1, 0, -1):
+
+        current_polygon = polygon_gdf.iloc[i].geometry
+        previous_polygon = polygon_gdf.iloc[i - 1].geometry
+        difference_polygon = current_polygon.difference(previous_polygon)
+
+        if difference_polygon.is_empty or not difference_polygon.is_valid:
+            continue
+
+        new_geometries.append(difference_polygon)
+        new_data.append(polygon_gdf.iloc[i].drop("geometry"))
+
+    inner_most_polygon = polygon_gdf.iloc[0].geometry
+
+    if inner_most_polygon.is_valid:
+
+        new_geometries.append(inner_most_polygon)
+        new_data.append(polygon_gdf.iloc[0].drop("geometry"))
+
+    polygon_gdf_difference = gp.GeoDataFrame(
+        new_data, geometry=new_geometries, crs=polygon_gdf.crs
+        )
+
+    return polygon_gdf_difference
+
+
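`overlay_difference()` sorts the polygons (by `sort_col`, if given) and then walks from the largest to the smallest, subtracting each polygon's predecessor from it, so overlapping discs become disjoint rings while the innermost polygon is kept whole. A self-contained sketch with synthetic geometry:

```python
import geopandas as gp
from shapely.geometry import Point
from huff.gistools import overlay_difference

# Three concentric discs around a synthetic point (CRS is hypothetical)
discs_gdf = gp.GeoDataFrame(
    {"segment": [500, 1000, 2000]},
    geometry=[Point(0, 0).buffer(d) for d in [500, 1000, 2000]],
    crs="EPSG:25832"
    )

rings_gdf = overlay_difference(polygon_gdf=discs_gdf, sort_col="segment")
# Rows come back outermost first: ring 2000-1000, ring 1000-500, disc 500
print(rings_gdf["segment"].tolist())  # [2000, 1000, 500]
```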
+def point_spatial_join(
+    polygon_gdf: gp.GeoDataFrame,
+    point_gdf: gp.GeoDataFrame,
+    join_type: str = "inner",
+    polygon_ref_cols: list = [],
+    point_stat_col: str = None
+    ):
+
+    if polygon_gdf.crs != point_gdf.crs:
+        raise ValueError(f"Coordinate reference systems of polygon and point data do not match. Polygons: {str(polygon_gdf.crs)}, points: {str(point_gdf.crs)}")
+
+    if polygon_ref_cols != []:
+        for polygon_ref_col in polygon_ref_cols:
+            if polygon_ref_col not in polygon_gdf.columns:
+                raise KeyError(f"Column {polygon_ref_col} not in polygon data")
+
+    if point_stat_col is not None:
+        if point_stat_col not in point_gdf.columns:
+            raise KeyError(f"Column {point_stat_col} not in point data")
+        if not is_numeric_dtype(point_gdf[point_stat_col]):
+            raise TypeError(f"Column {point_stat_col} is not numeric")
+
+    shp_points_gdf_join = point_gdf.sjoin(
+        polygon_gdf,
+        how=join_type
+        )
+
+    spatial_join_stat = None
+
+    if polygon_ref_cols != [] and point_stat_col is not None:
+        shp_points_gdf_join_count = shp_points_gdf_join.groupby(polygon_ref_cols)[point_stat_col].count()
+        shp_points_gdf_join_sum = shp_points_gdf_join.groupby(polygon_ref_cols)[point_stat_col].sum()
+        shp_points_gdf_join_min = shp_points_gdf_join.groupby(polygon_ref_cols)[point_stat_col].min()
+        shp_points_gdf_join_max = shp_points_gdf_join.groupby(polygon_ref_cols)[point_stat_col].max()
+        shp_points_gdf_join_mean = shp_points_gdf_join.groupby(polygon_ref_cols)[point_stat_col].mean()
+
+        shp_points_gdf_join_count = shp_points_gdf_join_count.rename("count").to_frame()
+        shp_points_gdf_join_sum = shp_points_gdf_join_sum.rename("sum").to_frame()
+        shp_points_gdf_join_min = shp_points_gdf_join_min.rename("min").to_frame()
+        shp_points_gdf_join_max = shp_points_gdf_join_max.rename("max").to_frame()
+        shp_points_gdf_join_mean = shp_points_gdf_join_mean.rename("mean").to_frame()
+        spatial_join_stat = shp_points_gdf_join_count.join(
+            [
+                shp_points_gdf_join_sum,
+                shp_points_gdf_join_min,
+                shp_points_gdf_join_max,
+                shp_points_gdf_join_mean
+            ]
+            )
+
+    return [
+        shp_points_gdf_join,
+        spatial_join_stat
+    ]
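`point_spatial_join()` returns a two-element list: the joined point layer and, if both `polygon_ref_cols` and `point_stat_col` are given, a per-polygon table with count/sum/min/max/mean of the point attribute (otherwise `None`). A usage sketch with hypothetical layers and column names:

```python
import geopandas as gp
from huff.gistools import point_spatial_join

districts_gdf = gp.read_file("districts.shp")        # hypothetical polygon layer
supermarkets_gdf = gp.read_file("supermarkets.shp")  # hypothetical point layer
# Both layers must share a CRS, otherwise a ValueError is raised

joined_gdf, stats_df = point_spatial_join(
    polygon_gdf=districts_gdf,
    point_gdf=supermarkets_gdf,
    polygon_ref_cols=["district_id"],  # hypothetical polygon key column(s)
    point_stat_col="salesarea"         # hypothetical numeric point column
    )
# stats_df: one row per district with columns count, sum, min, max, mean
```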