huff 1.0.0__tar.gz → 1.1.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- huff-1.1.1/PKG-INFO +49 -0
- huff-1.1.1/README.md +41 -0
- {huff-1.0.0 → huff-1.1.1}/huff/gistools.py +2 -2
- {huff-1.0.0 → huff-1.1.1}/huff/models.py +286 -20
- huff-1.1.1/huff/ors.py +383 -0
- {huff-1.0.0 → huff-1.1.1}/huff/tests/tests_huff.py +31 -18
- huff-1.1.1/huff.egg-info/PKG-INFO +49 -0
- {huff-1.0.0 → huff-1.1.1}/setup.py +1 -1
- huff-1.0.0/PKG-INFO +0 -32
- huff-1.0.0/README.md +0 -15
- huff-1.0.0/huff/ors.py +0 -334
- huff-1.0.0/huff.egg-info/PKG-INFO +0 -32
- {huff-1.0.0 → huff-1.1.1}/MANIFEST.in +0 -0
- {huff-1.0.0 → huff-1.1.1}/huff/__init__.py +0 -0
- {huff-1.0.0 → huff-1.1.1}/huff/tests/__init__.py +0 -0
- {huff-1.0.0 → huff-1.1.1}/huff/tests/data/Haslach.cpg +0 -0
- {huff-1.0.0 → huff-1.1.1}/huff/tests/data/Haslach.dbf +0 -0
- {huff-1.0.0 → huff-1.1.1}/huff/tests/data/Haslach.prj +0 -0
- {huff-1.0.0 → huff-1.1.1}/huff/tests/data/Haslach.qmd +0 -0
- {huff-1.0.0 → huff-1.1.1}/huff/tests/data/Haslach.shp +0 -0
- {huff-1.0.0 → huff-1.1.1}/huff/tests/data/Haslach.shx +0 -0
- {huff-1.0.0 → huff-1.1.1}/huff/tests/data/Haslach_supermarkets.cpg +0 -0
- {huff-1.0.0 → huff-1.1.1}/huff/tests/data/Haslach_supermarkets.dbf +0 -0
- {huff-1.0.0 → huff-1.1.1}/huff/tests/data/Haslach_supermarkets.prj +0 -0
- {huff-1.0.0 → huff-1.1.1}/huff/tests/data/Haslach_supermarkets.qmd +0 -0
- {huff-1.0.0 → huff-1.1.1}/huff/tests/data/Haslach_supermarkets.shp +0 -0
- {huff-1.0.0 → huff-1.1.1}/huff/tests/data/Haslach_supermarkets.shx +0 -0
- {huff-1.0.0 → huff-1.1.1}/huff.egg-info/SOURCES.txt +0 -0
- {huff-1.0.0 → huff-1.1.1}/huff.egg-info/dependency_links.txt +0 -0
- {huff-1.0.0 → huff-1.1.1}/huff.egg-info/requires.txt +0 -0
- {huff-1.0.0 → huff-1.1.1}/huff.egg-info/top_level.txt +0 -0
- {huff-1.0.0 → huff-1.1.1}/setup.cfg +0 -0
huff-1.1.1/PKG-INFO
ADDED
@@ -0,0 +1,49 @@
+Metadata-Version: 2.1
+Name: huff
+Version: 1.1.1
+Summary: huff: Huff Model Market Area Analysis
+Author: Thomas Wieland
+Author-email: geowieland@googlemail.com
+Description-Content-Type: text/markdown
+
+# huff: Huff Model Market Area Analysis
+
+## Author
+
+Thomas Wieland [ORCID](https://orcid.org/0000-0001-5168-9846) [EMail](mailto:geowieland@googlemail.com)
+
+See the /tests directory for usage examples of most of the included functions.
+
+
+## Features
+
+- **Huff Model**:
+  - Defining origins and destinations with weightings
+  - Creating interaction matrix from origins and destinations
+  - Calculating basic Huff Model
+- **Multiplicative Competitive Interaction Model**:
+  - Log-centering transformation
+- **OpenRouteService Client** (Tools via API):
+  - Creating transport costs matrix from origins and destinations
+  - Creating isochrones from destinations
+
+Attribution of OpenRouteService:
+© openrouteservice.org by HeiGIT | Map data © OpenStreetMap contributors
+Visit https://openrouteservice.org/
+
+## Literature
+- Huff DL (1962) *Determination of Intra-Urban Retail Trade Areas*.
+- Huff DL (1964) Defining and estimating a trading area. *Journal of Marketing* 28(4): 34–38. [10.2307/1249154](https://doi.org/10.2307/1249154)
+- Huff DL, McCallum BM (2008) Calibrating the Huff Model using ArcGIS Business Analyst. ESRI White Paper, September 2008. https://www.esri.com/library/whitepapers/pdfs/calibrating-huff-model.pdf
+- De Beule M, Van den Poel D, Van de Weghe N (2014) An extended Huff-model for robustly benchmarking and predicting retail network performance. *Applied Geography* 46(1): 80–89. [10.1016/j.apgeog.2013.09.026](https://doi.org/10.1016/j.apgeog.2013.09.026)
+- Nakanishi M, Cooper LG (1974) Parameter estimation for a Multiplicative Competitive Interaction Model: Least squares approach. *Journal of Marketing Research* 11(3): 303–311. [10.2307/3151146](https://doi.org/10.2307/3151146)
+- Wieland T (2017) Market Area Analysis for Retail and Service Locations with MCI. *R Journal* 9(1): 298–323. [10.32614/RJ-2017-020](https://doi.org/10.32614/RJ-2017-020)
+- Wieland T (2018) A Hurdle Model Approach of Store Choice and Market Area Analysis in Grocery Retailing. *Papers in Applied Geography* 4(4): 370–389. [10.1080/23754931.2018.1519458](https://doi.org/10.1080/23754931.2018.1519458)
+
+
+## Installation
+
+To install the package, use `pip`:
+
+```bash
+pip install huff
huff-1.1.1/README.md
ADDED
@@ -0,0 +1,41 @@
+# huff: Huff Model Market Area Analysis
+
+## Author
+
+Thomas Wieland [ORCID](https://orcid.org/0000-0001-5168-9846) [EMail](mailto:geowieland@googlemail.com)
+
+See the /tests directory for usage examples of most of the included functions.
+
+
+## Features
+
+- **Huff Model**:
+  - Defining origins and destinations with weightings
+  - Creating interaction matrix from origins and destinations
+  - Calculating basic Huff Model
+- **Multiplicative Competitive Interaction Model**:
+  - Log-centering transformation
+- **OpenRouteService Client** (Tools via API):
+  - Creating transport costs matrix from origins and destinations
+  - Creating isochrones from destinations
+
+Attribution of OpenRouteService:
+© openrouteservice.org by HeiGIT | Map data © OpenStreetMap contributors
+Visit https://openrouteservice.org/
+
+## Literature
+- Huff DL (1962) *Determination of Intra-Urban Retail Trade Areas*.
+- Huff DL (1964) Defining and estimating a trading area. *Journal of Marketing* 28(4): 34–38. [10.2307/1249154](https://doi.org/10.2307/1249154)
+- Huff DL, McCallum BM (2008) Calibrating the Huff Model using ArcGIS Business Analyst. ESRI White Paper, September 2008. https://www.esri.com/library/whitepapers/pdfs/calibrating-huff-model.pdf
+- De Beule M, Van den Poel D, Van de Weghe N (2014) An extended Huff-model for robustly benchmarking and predicting retail network performance. *Applied Geography* 46(1): 80–89. [10.1016/j.apgeog.2013.09.026](https://doi.org/10.1016/j.apgeog.2013.09.026)
+- Nakanishi M, Cooper LG (1974) Parameter estimation for a Multiplicative Competitive Interaction Model: Least squares approach. *Journal of Marketing Research* 11(3): 303–311. [10.2307/3151146](https://doi.org/10.2307/3151146)
+- Wieland T (2017) Market Area Analysis for Retail and Service Locations with MCI. *R Journal* 9(1): 298–323. [10.32614/RJ-2017-020](https://doi.org/10.32614/RJ-2017-020)
+- Wieland T (2018) A Hurdle Model Approach of Store Choice and Market Area Analysis in Grocery Retailing. *Papers in Applied Geography* 4(4): 370–389. [10.1080/23754931.2018.1519458](https://doi.org/10.1080/23754931.2018.1519458)
+
+
+## Installation
+
+To install the package, use `pip`:
+
+```bash
+pip install huff
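The interaction matrix built by models.py (see the hunks below) uses the columns A_j (attraction), t_ij (transport costs), C_i (market size), p_ij (choice probabilities), E_ij (expected flows) and T_j (total expected market area per destination). As a quick orientation, and not as a quotation from the package docs, the basic Huff model in its power-function form and the log-centering transformation implemented by the new `lct` helper can be written as:

$$
p_{ij} = \frac{A_j^{\gamma}\, t_{ij}^{-\lambda}}{\sum_{k=1}^{n} A_k^{\gamma}\, t_{ik}^{-\lambda}},
\qquad E_{ij} = p_{ij}\, C_i,
\qquad T_j = \sum_{i} E_{ij}
$$

$$
x_{ij}^{*} = \ln\!\left(\frac{x_{ij}}{\tilde{x}_i}\right),
\qquad \tilde{x}_i = \Bigl(\prod_{j=1}^{n_i} x_{ij}\Bigr)^{1/n_i}
$$

Here γ and λ correspond to the weights reported as "Gamma" and "Lambda" in the model summary; the metadata also tracks a weighting function ("func"), so the power form above is only the textbook case.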
{huff-1.0.0 → huff-1.1.1}/huff/gistools.py
@@ -4,8 +4,8 @@
 # Author: Thomas Wieland
 # ORCID: 0000-0001-5168-9846
 # mail: geowieland@googlemail.com
-# Version: 1.
-# Last update: 2025-04-
+# Version: 1.1.1
+# Last update: 2025-04-29 18:12
 # Copyright (c) 2025 Thomas Wieland
 #-----------------------------------------------------------------------
 
{huff-1.0.0 → huff-1.1.1}/huff/models.py
@@ -4,8 +4,8 @@
 # Author: Thomas Wieland
 # ORCID: 0000-0001-5168-9846
 # mail: geowieland@googlemail.com
-# Version: 1.
-# Last update: 2025-04-
+# Version: 1.1.1
+# Last update: 2025-04-29 18:12
 # Copyright (c) 2025 Thomas Wieland
 #-----------------------------------------------------------------------
 
@@ -13,7 +13,9 @@
 import pandas as pd
 import geopandas as gp
 import numpy as np
-
+import time
+from huff.ors import Client, TimeDistanceMatrix, Isochrone
+from huff.gistools import overlay_difference
 
 
 class CustomerOrigins:
@@ -198,28 +200,142 @@ class SupplyLocations:
         self.metadata = metadata
 
         return self
+
+    def add_new_destinations(
+        self,
+        new_destinations,
+        ):
+
+        geodata_gpd_original = self.get_geodata_gpd_original()
+        geodata_gpd = self.get_geodata_gpd()
+        metadata = self.get_metadata()
+
+        new_destinations_gpd_original = new_destinations.get_geodata_gpd_original()
+        new_destinations_gpd = new_destinations.get_geodata_gpd()
+        new_destinations_metadata = new_destinations.get_metadata()
+
+        if list(new_destinations_gpd_original.columns) != list(geodata_gpd_original.columns):
+            raise KeyError("Supply locations and new destinations data have different column names.")
+        if list(new_destinations_gpd.columns) != list(geodata_gpd.columns):
+            raise KeyError("Supply locations and new destinations data have different column names.")
+
+        geodata_gpd_original = geodata_gpd_original.append(
+            new_destinations_gpd_original,
+            ignore_index=True
+            )
+
+        geodata_gpd = geodata_gpd.append(
+            new_destinations_gpd,
+            ignore_index=True
+            )
+
+        metadata["no_points"] = metadata["no_points"]+new_destinations_metadata["no_points"]
+
+        self.geodata_gpd = geodata_gpd
+        self.geodata_gpd_original = geodata_gpd_original
+        self.metadata = metadata
+
+        return self
+
+    def isochrones(
+        self,
+        segments: list = [900, 600, 300],
+        range_type: str = "time",
+        intersections: str = "true",
+        profile: str = "driving-car",
+        donut: bool = True,
+        ors_server: str = "https://api.openrouteservice.org/v2/",
+        ors_auth: str = None,
+        timeout = 10,
+        delay = 1,
+        save_output: bool = True,
+        output_filepath: str = "isochrones.shp",
+        output_crs: str = "EPSG:4326"
+        ):
+
+        geodata_gpd = self.get_geodata_gpd()
+        metadata = self.get_metadata()
+
+        coords = [(point.x, point.y) for point in geodata_gpd.geometry]
+
+        unique_id_col = metadata["unique_id"]
+        unique_id_values = geodata_gpd[unique_id_col].values
+
+        ors_client = Client(
+            server = ors_server,
+            auth = ors_auth
+            )
+
+        isochrones_gdf = gp.GeoDataFrame(columns=[unique_id_col, "geometry"])
+
+        i = 0
+
+        for x, y in coords:
+
+            isochrone_output = ors_client.isochrone(
+                locations = [[x, y]],
+                segments = segments,
+                range_type = range_type,
+                intersections = intersections,
+                profile = profile,
+                timeout = timeout,
+                save_output = False,
+                output_crs = output_crs
+                )
+
+            if isochrone_output.status_code != 200:
+                continue
+
+            isochrone_gdf = isochrone_output.get_isochrones_gdf()
+
+            if donut:
+                isochrone_gdf = overlay_difference(
+                    polygon_gdf = isochrone_gdf,
+                    sort_col = "segment"
+                    )
+
+            time.sleep(delay)
+
+            isochrone_gdf[unique_id_col] = unique_id_values[i]
+
+            isochrones_gdf = pd.concat(
+                [
+                    isochrones_gdf,
+                    isochrone_gdf
+                ],
+                ignore_index=True
+                )
+
+            i = i+1
+
+        isochrones_gdf.set_crs(
+            output_crs,
+            allow_override=True,
+            inplace=True
+            )
+
+        if save_output:
+
+            isochrones_gdf.to_file(filename = output_filepath)
+
+        return isochrones_gdf
 
 class InteractionMatrix:
 
     def __init__(
         self,
         interaction_matrix_df,
-        market_areas_df,
         customer_origins,
         supply_locations
         ):
 
         self.interaction_matrix_df = interaction_matrix_df
-        self.market_areas_df = market_areas_df
         self.customer_origins = customer_origins
         self.supply_locations = supply_locations
 
     def get_interaction_matrix_df(self):
         return self.interaction_matrix_df
 
-    def get_market_areas_df(self):
-        return self.market_areas_df
-
     def get_customer_origins(self):
         return self.customer_origins
 
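A compatibility note on the new `add_new_destinations` method above: it concatenates the GeoDataFrames with `DataFrame.append`, which was deprecated in pandas 1.4 and removed in pandas 2.0. On a current pandas/geopandas stack the same step would need `pd.concat`. A minimal, self-contained sketch with toy data (not package code):

```python
import geopandas as gp
import pandas as pd
from shapely.geometry import Point

# Two toy GeoDataFrames standing in for the existing supply locations and
# the new destinations (illustrative values only).
existing = gp.GeoDataFrame({"j": ["S1"]}, geometry=[Point(7.85, 47.99)], crs="EPSG:4326")
new = gp.GeoDataFrame({"j": ["S2"]}, geometry=[Point(7.86, 48.00)], crs="EPSG:4326")

# Equivalent of existing.append(new, ignore_index=True) on pandas >= 2.0:
combined = pd.concat([existing, new], ignore_index=True)
print(combined)
```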
@@ -231,7 +347,7 @@ class InteractionMatrix:
         customer_origins_metadata = self.get_customer_origins().get_metadata()
         supply_locations_metadata = self.get_supply_locations().get_metadata()
 
-        print("Huff Model")
+        print("Huff Model Interaction Matrix")
         print("----------------------------------")
         print("Supply locations " + str(supply_locations_metadata["no_points"]))
         if supply_locations_metadata["attraction_col"][0] is None:
@@ -259,6 +375,7 @@ class InteractionMatrix:
         self,
         range_type: str = "time",
         time_unit: str = "minutes",
+        ors_server: str = "https://api.openrouteservice.org/v2/",
         ors_auth: str = None,
         save_output: bool = False,
         output_filepath: str = "transport_costs_matrix.csv"
@@ -285,8 +402,11 @@ class InteractionMatrix:
         customer_origins_index = list(range(len(customer_origins_coords)))
         locations_coords_index = list(range(len(customer_origins_index), len(locations_coords)))
 
-
-
+        ors_client = Client(
+            server = ors_server,
+            auth = ors_auth
+            )
+        time_distance_matrix = ors_client.matrix(
             locations = locations_coords,
             save_output = save_output,
             output_filepath = output_filepath,
@@ -295,6 +415,9 @@ class InteractionMatrix:
             range_type = range_type
             )
 
+        if time_distance_matrix.get_metadata() is None:
+            raise ValueError ("No transport costs matrix was built.")
+
         transport_costs_matrix = time_distance_matrix.get_matrix()
         transport_costs_matrix_config = time_distance_matrix.get_config()
         range_type = transport_costs_matrix_config["range_type"]
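These two hunks route the transport-cost matrix through the new `Client` wrapper from huff/ors.py and add a guard for failed requests. Since ors.py itself is not shown line by line in this diff, the following is only a hedged sketch of a direct call: the `Client(server=..., auth=...)`, `matrix(...)`, `get_metadata()` and `get_matrix()` calls are taken from the hunks above, while the coordinates, the API key placeholder and the assumption that no further arguments are required are not from the source.

```python
from huff.ors import Client

# Sketch only: huff/ors.py may require additional arguments not visible here.
ors_client = Client(
    server="https://api.openrouteservice.org/v2/",
    auth="YOUR_ORS_API_KEY",  # personal openrouteservice.org key (placeholder)
)

# [lon, lat] pairs: customer origins first, then supply locations, mirroring
# how InteractionMatrix assembles locations_coords.
locations = [[7.85, 47.99], [7.86, 48.00], [7.87, 48.01]]

time_distance_matrix = ors_client.matrix(
    locations=locations,
    range_type="time",
    save_output=False,
    output_filepath="transport_costs_matrix.csv",
)

# Same guard as the new code above: missing metadata means the request failed.
if time_distance_matrix.get_metadata() is None:
    raise ValueError("No transport costs matrix was built.")

transport_costs = time_distance_matrix.get_matrix()
```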
@@ -339,6 +462,11 @@ class InteractionMatrix:
         if interaction_matrix_df["A_j"].isna().all():
             raise ValueError ("Attraction variable is not defined")
 
+        check_vars(
+            df = interaction_matrix_df,
+            cols = ["A_j", "t_ij"]
+            )
+
         customer_origins = self.customer_origins
         customer_origins_metadata = customer_origins.get_metadata()
         tc_weighting = customer_origins_metadata["weighting"][0]
@@ -400,6 +528,11 @@ class InteractionMatrix:
         if interaction_matrix_df["C_i"].isna().all():
             raise ValueError ("Market size column in customer origins not defined. Use CustomerOrigins.define_marketsize()")
 
+        check_vars(
+            df = interaction_matrix_df,
+            cols = ["C_i"]
+            )
+
         if interaction_matrix_df["p_ij"].isna().all():
             self.probabilities()
             interaction_matrix_df = self.interaction_matrix_df
@@ -413,17 +546,110 @@ class InteractionMatrix:
     def marketareas (self):
 
         interaction_matrix_df = self.interaction_matrix_df
-
+
+        check_vars(
+            df = interaction_matrix_df,
+            cols = ["E_ij"]
+            )
+
         market_areas_df = pd.DataFrame(interaction_matrix_df.groupby("j")["E_ij"].sum())
         market_areas_df = market_areas_df.reset_index(drop=False)
         market_areas_df = market_areas_df.rename(columns={"E_ij": "T_j"})
 
-
+        huff_model = HuffModel(
+            self,
+            market_areas_df
+            )
+
+        return huff_model
+
+    def mci_transformation(
+        self,
+        cols: list = ["A_j", "t_ij"]
+        ):
 
-
+        """ MCI model log-centering transformation """
+
+        cols = cols + ["p_ij"]
+
+        interaction_matrix_df = self.interaction_matrix_df
+
+        interaction_matrix_df = mci_transformation(
+            df = interaction_matrix_df,
+            ref_col = "i",
+            cols = cols
+            )
+
+        self.interaction_matrix_df = interaction_matrix_df
 
         return self
 
+class HuffModel:
+
+    def __init__(
+        self,
+        interaction_matrix,
+        market_areas_df
+        ):
+
+        self.interaction_matrix = interaction_matrix
+        self.market_areas_df = market_areas_df
+
+    def get_interaction_matrix_df(self):
+
+        interaction_matrix = self.interaction_matrix
+        interaction_matrix_df = interaction_matrix.get_interaction_matrix_df()
+
+        return interaction_matrix_df
+
+    def get_supply_locations(self):
+
+        interaction_matrix = self.interaction_matrix
+        supply_locations = interaction_matrix.get_supply_locations()
+
+        return supply_locations
+
+    def get_customer_origins(self):
+
+        interaction_matrix = self.interaction_matrix
+        customer_origins = interaction_matrix.get_customer_origins()
+
+        return customer_origins
+
+    def get_market_areas_df(self):
+        return self.market_areas_df
+
+    def summary(self):
+
+        interaction_matrix = self.interaction_matrix
+
+        customer_origins_metadata = interaction_matrix.get_customer_origins().get_metadata()
+        supply_locations_metadata = interaction_matrix.get_supply_locations().get_metadata()
+
+        print("Huff Model")
+        print("----------------------------------")
+        print("Supply locations " + str(supply_locations_metadata["no_points"]))
+        if supply_locations_metadata["attraction_col"][0] is None:
+            print("Attraction column not defined")
+        else:
+            print("Attraction column " + supply_locations_metadata["attraction_col"][0])
+        print("Customer origins " + str(customer_origins_metadata["no_points"]))
+        if customer_origins_metadata["marketsize_col"] is None:
+            print("Market size column not defined")
+        else:
+            print("Market size column " + customer_origins_metadata["marketsize_col"])
+        print("----------------------------------")
+        print("Weights")
+        if supply_locations_metadata["weighting"][0]["func"] is None and supply_locations_metadata["weighting"][0]["param"] is None:
+            print("Gamma not defined")
+        else:
+            print("Gamma " + str(supply_locations_metadata["weighting"][0]["param"]) + " (" + supply_locations_metadata["weighting"][0]["func"] + ")")
+        if customer_origins_metadata["weighting"][0]["func"] is None and customer_origins_metadata["weighting"][0]["param"] is None:
+            print("Lambda not defined")
+        else:
+            print("Lambda " + str(customer_origins_metadata["weighting"][0]["param"]) + " (" + customer_origins_metadata["weighting"][0]["func"] + ")")
+        print("----------------------------------")
+
 def load_geodata (
     file,
     location_type: str,
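The hunk above changes `marketareas()` from a method that stored `market_areas_df` on the interaction matrix to one that wraps the matrix and the aggregated `T_j` table in the new `HuffModel` class. A short, hedged usage fragment (it assumes an `interaction_matrix` whose `E_ij` column has already been computed; the full pipeline is in huff/tests/tests_huff.py):

```python
# Assumes `interaction_matrix` is an InteractionMatrix with transport costs,
# probabilities and expected flows (E_ij) already computed.
huff_model = interaction_matrix.marketareas()

huff_model.summary()                                # weights and input overview
market_areas_df = huff_model.get_market_areas_df()  # one T_j row per destination j
print(market_areas_df.head())
```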
@@ -438,7 +664,7 @@ def load_geodata (
     ):
 
     if location_type is None or (location_type != "origins" and location_type != "destinations"):
-        raise ValueError ("location_type must be either '
+        raise ValueError ("location_type must be either 'origins' or 'destinations'")
 
     if data_type not in ["shp", "csv", "xlsx"]:
         raise ValueError ("data_type must be 'shp', 'csv' or 'xlsx'")
@@ -517,7 +743,7 @@ def create_interaction_matrix(
     if not isinstance(customer_origins, CustomerOrigins):
         raise ValueError ("customer_origins must be of class CustomerOrigins")
     if not isinstance(supply_locations, SupplyLocations):
-        raise ValueError ("supply_locations must be of class
+        raise ValueError ("supply_locations must be of class SupplyLocations")
 
     customer_origins_metadata = customer_origins.get_metadata()
     if customer_origins_metadata["marketsize_col"] is None:
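For context on the two hunks around `create_interaction_matrix`: it expects a `CustomerOrigins` and a `SupplyLocations` object, which `load_geodata` produces, for example from the shapefiles shipped in huff/tests/data. Only `file`, `location_type` and `data_type` are visible in this diff, so the sketch below assumes the remaining `load_geodata` parameters have defaults; note also that the error messages above indicate a market size column must be defined on the origins (via `CustomerOrigins.define_marketsize()`, signature not shown here) before the matrix can be built.

```python
from huff.models import create_interaction_matrix, load_geodata

# Hedged sketch: further load_geodata parameters (e.g. ID or attraction
# columns) are not shown in this diff and may be required in practice.
customer_origins = load_geodata(
    "huff/tests/data/Haslach.shp",
    location_type="origins",
    data_type="shp",
)
supply_locations = load_geodata(
    "huff/tests/data/Haslach_supermarkets.shp",
    location_type="destinations",
    data_type="shp",
)

# define_marketsize()/define_attraction() steps omitted here (see tests).
interaction_matrix = create_interaction_matrix(customer_origins, supply_locations)
```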
@@ -573,13 +799,53 @@ def create_interaction_matrix(
     interaction_matrix_df["p_ij"] = None
     interaction_matrix_df["E_ij"] = None
 
-    market_areas_df = None
-
     interaction_matrix = InteractionMatrix(
         interaction_matrix_df,
-        market_areas_df,
         customer_origins,
         supply_locations
         )
 
-    return interaction_matrix
+    return interaction_matrix
+
+def check_vars(
+    df: pd.DataFrame,
+    cols: list
+    ):
+
+    for col in cols:
+        if col not in df.columns:
+            raise KeyError(f"Column '{col}' not in dataframe.")
+
+    for col in cols:
+        if not pd.api.types.is_numeric_dtype(df[col]):
+            raise ValueError(f"Column '{col}' is not numeric. All columns must be numeric.")
+
+    for col in cols:
+        if (df[col] <= 0).any():
+            raise ValueError(f"Column '{col}' includes values <= 0. All values must be numeric and positive.")
+
+def mci_transformation(
+    df: pd.DataFrame,
+    ref_col: str,
+    cols: list
+    ):
+
+    check_vars(
+        df = df,
+        cols = cols + [ref_col]
+        )
+
+    def lct (x):
+
+        x_geom = np.exp(np.log(x).mean())
+        x_lct = np.log(x/x_geom)
+
+        return x_lct
+
+    for var in cols:
+
+        var_t = df.groupby(ref_col)[var].apply(lct)
+        var_t = var_t.reset_index()
+        df[var+"_t"] = var_t[var]
+
+    return df