huff 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- huff/__init__.py +0 -0
- huff/gistools.py +49 -0
- huff/models.py +585 -0
- huff/ors.py +334 -0
- huff/tests/__init__.py +0 -0
- huff/tests/data/Haslach.cpg +1 -0
- huff/tests/data/Haslach.dbf +0 -0
- huff/tests/data/Haslach.prj +1 -0
- huff/tests/data/Haslach.qmd +43 -0
- huff/tests/data/Haslach.shp +0 -0
- huff/tests/data/Haslach.shx +0 -0
- huff/tests/data/Haslach_supermarkets.cpg +1 -0
- huff/tests/data/Haslach_supermarkets.dbf +0 -0
- huff/tests/data/Haslach_supermarkets.prj +1 -0
- huff/tests/data/Haslach_supermarkets.qmd +43 -0
- huff/tests/data/Haslach_supermarkets.shp +0 -0
- huff/tests/data/Haslach_supermarkets.shx +0 -0
- huff/tests/tests_huff.py +89 -0
- huff-1.0.0.dist-info/METADATA +32 -0
- huff-1.0.0.dist-info/RECORD +22 -0
- huff-1.0.0.dist-info/WHEEL +5 -0
- huff-1.0.0.dist-info/top_level.txt +1 -0
huff/__init__.py
ADDED
File without changes
|
huff/gistools.py
ADDED
@@ -0,0 +1,49 @@
|
|
1
|
+
#-----------------------------------------------------------------------
|
2
|
+
# Name: gistools (huff package)
|
3
|
+
# Purpose: GIS tools
|
4
|
+
# Author: Thomas Wieland
|
5
|
+
# ORCID: 0000-0001-5168-9846
|
6
|
+
# mail: geowieland@googlemail.com
|
7
|
+
# Version: 1.0.0
|
8
|
+
# Last update: 2025-04-25 18:16
|
9
|
+
# Copyright (c) 2025 Thomas Wieland
|
10
|
+
#-----------------------------------------------------------------------
|
11
|
+
|
12
|
+
|
13
|
+
import geopandas as gp
|
14
|
+
|
15
|
+
|
16
|
+
def overlay_difference(
    polygon_gdf: gp.GeoDataFrame,
    sort_col: str = None,
    ):
    """Convert a stack of nested polygons into disjoint rings.

    Each polygon (except the inner-most one) is replaced by its geometric
    difference with the next-smaller polygon, e.g. to turn cumulative
    isochrones into non-overlapping bands.

    Parameters
    ----------
    polygon_gdf : gp.GeoDataFrame
        Polygons ordered (or sortable via *sort_col*) from inner-most to
        outer-most.
    sort_col : str, optional
        Column to sort by (ascending) before differencing. If None, the
        existing row order is used.

    Returns
    -------
    gp.GeoDataFrame
        Difference rings with the original attribute columns; CRS is
        inherited from the input. Empty/invalid difference results are
        silently dropped.
    """

    if sort_col is not None:
        polygon_gdf = polygon_gdf.sort_values(by=sort_col).reset_index(drop=True)
    else:
        polygon_gdf = polygon_gdf.reset_index(drop=True)

    # Robustness fix: an empty input would otherwise crash on iloc[0] below.
    if len(polygon_gdf) == 0:
        return polygon_gdf.copy()

    new_geometries = []
    new_data = []

    # Walk from the outer-most polygon inwards, keeping only the ring that
    # is not covered by the next-smaller polygon.
    for i in range(len(polygon_gdf) - 1, 0, -1):
        current_polygon = polygon_gdf.iloc[i].geometry
        previous_polygon = polygon_gdf.iloc[i - 1].geometry
        difference_polygon = current_polygon.difference(previous_polygon)

        # Skip degenerate results (fully covered or invalid geometry).
        if difference_polygon.is_empty or not difference_polygon.is_valid:
            continue

        new_geometries.append(difference_polygon)
        new_data.append(polygon_gdf.iloc[i].drop("geometry"))

    # The inner-most polygon is kept as-is.
    inner_most_polygon = polygon_gdf.iloc[0].geometry
    if inner_most_polygon.is_valid:
        new_geometries.append(inner_most_polygon)
        new_data.append(polygon_gdf.iloc[0].drop("geometry"))

    polygon_gdf_difference = gp.GeoDataFrame(
        new_data, geometry=new_geometries, crs=polygon_gdf.crs
    )

    return polygon_gdf_difference
|
huff/models.py
ADDED
@@ -0,0 +1,585 @@
|
|
1
|
+
#-----------------------------------------------------------------------
|
2
|
+
# Name: models (huff package)
|
3
|
+
# Purpose: Huff Model classes and functions
|
4
|
+
# Author: Thomas Wieland
|
5
|
+
# ORCID: 0000-0001-5168-9846
|
6
|
+
# mail: geowieland@googlemail.com
|
7
|
+
# Version: 1.0.0
|
8
|
+
# Last update: 2025-04-25 18:13
|
9
|
+
# Copyright (c) 2025 Thomas Wieland
|
10
|
+
#-----------------------------------------------------------------------
|
11
|
+
|
12
|
+
|
13
|
+
import pandas as pd
|
14
|
+
import geopandas as gp
|
15
|
+
import numpy as np
|
16
|
+
from .ors import matrix
|
17
|
+
|
18
|
+
|
19
|
+
class CustomerOrigins:
    """Container for customer origin geodata plus Huff model metadata."""

    def __init__(self, geodata_gpd, geodata_gpd_original, metadata):
        # Reduced point layer, full original input layer, and metadata dict.
        self.geodata_gpd = geodata_gpd
        self.geodata_gpd_original = geodata_gpd_original
        self.metadata = metadata

    def get_geodata_gpd(self):
        """Return the reduced point layer (unique id + geometry)."""
        return self.geodata_gpd

    def get_geodata_gpd_original(self):
        """Return the full original input layer."""
        return self.geodata_gpd_original

    def get_metadata(self):
        """Return the metadata dict."""
        return self.metadata

    def summary(self):
        """Print a short description and return the metadata dict."""
        meta = self.metadata

        print("Huff Model Customer Origins")
        print(f"No. locations {meta['no_points']}")

        marketsize = meta["marketsize_col"]
        if marketsize is None:
            print("Market size column not defined")
        else:
            print(f"Market size column {marketsize}")

        weighting = meta["weighting"][0]
        if weighting["func"] is None and weighting["param"] is None:
            print("Transport cost weighting not defined")
        else:
            print(f"Transport cost weighting {weighting['func']} with lambda = {weighting['param']}")

        print(f"Unique ID column {meta['unique_id']}")
        print(f"Input CRS {meta['crs_input']}")

        return meta

    def define_marketsize(self, marketsize_col):
        """Register *marketsize_col* as the market size column.

        Raises KeyError if the column is absent from the original layer.
        Returns self for chaining.
        """
        if marketsize_col not in self.geodata_gpd_original.columns:
            raise KeyError("Column " + marketsize_col + " not in data")
        self.metadata["marketsize_col"] = marketsize_col
        return self

    def define_transportcosts_weighting(self, func="power", param_lambda=-2):
        """Set the transport cost weighting function and lambda parameter.

        Returns self for chaining.
        """
        weighting = self.metadata["weighting"][0]
        weighting["func"] = func
        weighting["param"] = param_lambda
        return self
|
97
|
+
|
98
|
+
class SupplyLocations:
    """Container for supply location geodata plus Huff model metadata."""

    def __init__(
        self,
        geodata_gpd,
        geodata_gpd_original,
        metadata
        ):

        # Reduced point layer, full original input layer, and metadata dict.
        self.geodata_gpd = geodata_gpd
        self.geodata_gpd_original = geodata_gpd_original
        self.metadata = metadata

    def get_geodata_gpd(self):
        """Return the reduced point layer (unique id + geometry)."""
        return self.geodata_gpd

    def get_geodata_gpd_original(self):
        """Return the full original input layer."""
        return self.geodata_gpd_original

    def get_metadata(self):
        """Return the metadata dict."""
        return self.metadata

    def summary(self):
        """Print a short description and return the metadata dict."""

        metadata = self.metadata

        print("Huff Model Supply Locations")
        print("No. locations " + str(metadata["no_points"]))

        if metadata["attraction_col"][0] is None or metadata["attraction_col"] == []:
            print("Attraction column(s) not defined")
        else:
            print("Attraction column(s) " + ",".join(metadata["attraction_col"]))

        if metadata["weighting"][0]["func"] is None and metadata["weighting"][0]["param"] is None:
            print("Attraction weighting not defined")
        else:
            print("Attraction weighting " + metadata["weighting"][0]["func"] + " with gamma = " + str(metadata["weighting"][0]["param"]))

        print("Unique ID column " + metadata["unique_id"])
        print("Input CRS " + str(metadata["crs_input"]))

        return metadata

    def define_attraction(
        self,
        attraction_col
        ):
        """Register *attraction_col* as the (first) attraction column.

        Raises KeyError if the column is absent. Returns self for chaining.
        """

        geodata_gpd_original = self.geodata_gpd_original
        metadata = self.metadata

        if attraction_col not in geodata_gpd_original.columns:
            raise KeyError ("Column " + attraction_col + " not in data")
        metadata["attraction_col"][0] = attraction_col

        self.metadata = metadata

        return self

    def define_attraction_weighting(
        self,
        func = "power",
        param_gamma = 1
        ):
        """Set the attraction weighting function and gamma parameter.

        Raises ValueError if no attraction column has been defined yet.
        Returns self for chaining.
        """

        metadata = self.metadata

        # BUGFIX: attraction_col is initialised as [None], never None, so the
        # old "is None" guard could not fire; test the first element instead.
        if metadata["attraction_col"][0] is None:
            raise ValueError ("Attraction column is not yet defined. Use SupplyLocations.define_attraction()")

        metadata["weighting"][0]["func"] = func
        metadata["weighting"][0]["param"] = param_gamma
        self.metadata = metadata

        return self

    def add_var(
        self,
        var: str = None,
        func: str = None,
        param: float = None
        ):
        """Append an additional attraction variable with its own weighting.

        Raises ValueError if the primary attraction column is undefined.
        Returns self for chaining.
        """

        metadata = self.metadata

        # BUGFIX: same ineffective guard as in define_attraction_weighting().
        if metadata["attraction_col"][0] is None:
            raise ValueError ("Attraction column is not yet defined. Use SupplyLocations.define_attraction()")

        # New weighting entry is keyed by the current number of variables.
        no_attraction_vars = len(metadata["attraction_col"])
        new_key = no_attraction_vars

        metadata["attraction_col"] = metadata["attraction_col"] + [var]

        metadata["weighting"][new_key] = {
            "func": func,
            "param": param
        }

        self.metadata = metadata

        return self
|
201
|
+
|
202
|
+
class InteractionMatrix:
    """Origin-destination interaction matrix for a Huff model.

    Wraps a long-format DataFrame with one row per origin-destination pair.
    Column conventions: "i"/"j" (origin/destination ids), "ij" (pair key),
    "C_i" (origin market size), "A_j" (destination attraction),
    "t_ij" (transport costs), "U_ij" (utility), "p_ij" (choice
    probability), "E_ij" (expected flow).
    """

    def __init__(
        self,
        interaction_matrix_df,
        market_areas_df,
        customer_origins,
        supply_locations
        ):

        self.interaction_matrix_df = interaction_matrix_df
        self.market_areas_df = market_areas_df
        self.customer_origins = customer_origins
        self.supply_locations = supply_locations

    def get_interaction_matrix_df(self):
        """Return the interaction matrix DataFrame."""
        return self.interaction_matrix_df

    def get_market_areas_df(self):
        """Return the market areas DataFrame (None until marketareas() ran)."""
        return self.market_areas_df

    def get_customer_origins(self):
        """Return the CustomerOrigins object."""
        return self.customer_origins

    def get_supply_locations(self):
        """Return the SupplyLocations object."""
        return self.supply_locations

    def summary(self):
        """Print an overview of the model input (no return value)."""

        customer_origins_metadata = self.get_customer_origins().get_metadata()
        supply_locations_metadata = self.get_supply_locations().get_metadata()

        print("Huff Model")
        print("----------------------------------")
        print("Supply locations " + str(supply_locations_metadata["no_points"]))
        if supply_locations_metadata["attraction_col"][0] is None:
            print("Attraction column not defined")
        else:
            print("Attraction column " + supply_locations_metadata["attraction_col"][0])
        print("Customer origins " + str(customer_origins_metadata["no_points"]))
        if customer_origins_metadata["marketsize_col"] is None:
            print("Market size column not defined")
        else:
            print("Market size column " + customer_origins_metadata["marketsize_col"])
        print("----------------------------------")
        print("Weights")
        if supply_locations_metadata["weighting"][0]["func"] is None and supply_locations_metadata["weighting"][0]["param"] is None:
            print("Gamma not defined")
        else:
            print("Gamma " + str(supply_locations_metadata["weighting"][0]["param"]) + " (" + supply_locations_metadata["weighting"][0]["func"] + ")")
        if customer_origins_metadata["weighting"][0]["func"] is None and customer_origins_metadata["weighting"][0]["param"] is None:
            print("Lambda not defined")
        else:
            print("Lambda " + str(customer_origins_metadata["weighting"][0]["param"]) + " (" + customer_origins_metadata["weighting"][0]["func"] + ")")
        print("----------------------------------")

    def transport_costs(
        self,
        range_type: str = "time",
        time_unit: str = "minutes",
        ors_auth: str = None,
        save_output: bool = False,
        output_filepath: str = "transport_costs_matrix.csv"
        ):
        """Fill the t_ij column via an OpenRouteService matrix query.

        range_type: "time" or "distance" (passed through to ORS).
        time_unit: "minutes" or "hours" converts the ORS seconds; any other
            value keeps the raw ORS values.
        ors_auth: ORS API key.
        Returns self for chaining.
        """

        interaction_matrix_df = self.get_interaction_matrix_df()

        customer_origins = self.get_customer_origins()
        customer_origins_geodata_gpd = customer_origins.get_geodata_gpd()
        customer_origins_metadata = customer_origins.get_metadata()
        customer_origins_uniqueid = customer_origins_metadata["unique_id"]
        customer_origins_coords = [[point.x, point.y] for point in customer_origins_geodata_gpd.geometry]
        customer_origins_ids = customer_origins_geodata_gpd[customer_origins_uniqueid].tolist()

        supply_locations = self.get_supply_locations()
        supply_locations_geodata_gpd = supply_locations.get_geodata_gpd()
        supply_locations_metadata = supply_locations.get_metadata()
        supply_locations_uniqueid = supply_locations_metadata["unique_id"]
        supply_locations_coords = [[point.x, point.y] for point in supply_locations_geodata_gpd.geometry]
        supply_locations_ids = supply_locations_geodata_gpd[supply_locations_uniqueid].tolist()

        # ORS expects one joint coordinate list; sources/destinations are
        # index positions into that list.
        locations_coords = customer_origins_coords + supply_locations_coords

        customer_origins_index = list(range(len(customer_origins_coords)))
        locations_coords_index = list(range(len(customer_origins_index), len(locations_coords)))

        time_distance_matrix = matrix(
            auth = ors_auth,
            locations = locations_coords,
            save_output = save_output,
            output_filepath = output_filepath,
            sources = customer_origins_index,
            destinations = locations_coords_index,
            range_type = range_type
            )

        transport_costs_matrix = time_distance_matrix.get_matrix()
        transport_costs_matrix_config = time_distance_matrix.get_config()
        range_type = transport_costs_matrix_config["range_type"]

        # Translate ORS index positions back into the user-supplied ids.
        transport_costs_matrix["source"] = transport_costs_matrix["source"].astype(int)
        transport_costs_matrix["source"] = transport_costs_matrix["source"].map(
            dict(enumerate(customer_origins_ids))
            )

        transport_costs_matrix["destination"] = transport_costs_matrix["destination"].astype(int)
        transport_costs_matrix["destination"] = transport_costs_matrix["destination"].map(
            dict(enumerate(supply_locations_ids))
            )

        # Pair key matching the "ij" column of the interaction matrix.
        transport_costs_matrix["source_destination"] = transport_costs_matrix["source"].astype(str)+"_"+transport_costs_matrix["destination"].astype(str)
        transport_costs_matrix = transport_costs_matrix[["source_destination", range_type]]

        interaction_matrix_df = interaction_matrix_df.merge(
            transport_costs_matrix,
            left_on="ij",
            right_on="source_destination"
            )

        # ORS returns seconds; convert to the requested unit.
        interaction_matrix_df["t_ij"] = interaction_matrix_df[range_type]
        if time_unit == "minutes":
            interaction_matrix_df["t_ij"] = interaction_matrix_df["t_ij"]/60
        if time_unit == "hours":
            interaction_matrix_df["t_ij"] = interaction_matrix_df["t_ij"]/60/60

        interaction_matrix_df = interaction_matrix_df.drop(columns=["source_destination", range_type])

        self.interaction_matrix_df = interaction_matrix_df

        return self

    def utility(self):
        """Compute U_ij from weighted attraction and transport costs.

        Uses the weighting configs of the supply locations (gamma) and the
        customer origins (lambda); each may be "power" or "exponential".
        Returns self for chaining.
        """

        interaction_matrix_df = self.interaction_matrix_df

        if interaction_matrix_df["t_ij"].isna().all():
            raise ValueError ("Transport cost variable is not defined")
        if interaction_matrix_df["A_j"].isna().all():
            raise ValueError ("Attraction variable is not defined")

        customer_origins = self.customer_origins
        customer_origins_metadata = customer_origins.get_metadata()
        tc_weighting = customer_origins_metadata["weighting"][0]
        if tc_weighting["func"] == "power":
            interaction_matrix_df["t_ij_weighted"] = interaction_matrix_df["t_ij"] ** tc_weighting["param"]
        elif tc_weighting["func"] == "exponential":
            interaction_matrix_df["t_ij_weighted"] = np.exp(tc_weighting["param"] * interaction_matrix_df['t_ij'])
        else:
            raise ValueError ("Transport costs weighting is not defined.")

        supply_locations = self.supply_locations
        supply_locations_metadata = supply_locations.get_metadata()
        attraction_weighting = supply_locations_metadata["weighting"][0]
        if attraction_weighting["func"] == "power":
            interaction_matrix_df["A_j_weighted"] = interaction_matrix_df["A_j"] ** attraction_weighting["param"]
        # BUGFIX: this branch previously tested tc_weighting["func"], so an
        # exponential attraction weighting was never applied and raised.
        elif attraction_weighting["func"] == "exponential":
            interaction_matrix_df["A_j_weighted"] = np.exp(attraction_weighting["param"] * interaction_matrix_df['A_j'])
        else:
            raise ValueError ("Attraction weighting is not defined.")

        interaction_matrix_df["U_ij"] = interaction_matrix_df["A_j_weighted"]/interaction_matrix_df["t_ij_weighted"]

        interaction_matrix_df = interaction_matrix_df.drop(columns=['A_j_weighted', 't_ij_weighted'])

        self.interaction_matrix_df = interaction_matrix_df

        return self

    def probabilities(self):
        """Compute p_ij = U_ij / sum_j(U_ij) per origin i.

        Computes utilities first if U_ij is still empty. Returns self.
        """

        interaction_matrix_df = self.interaction_matrix_df

        if interaction_matrix_df["U_ij"].isna().all():
            self.utility()
            interaction_matrix_df = self.interaction_matrix_df

        utility_i = pd.DataFrame(interaction_matrix_df.groupby("i")["U_ij"].sum())
        utility_i = utility_i.rename(columns = {"U_ij": "U_i"})

        interaction_matrix_df = interaction_matrix_df.merge(
            utility_i,
            left_on="i",
            right_on="i",
            how="inner"
            )

        interaction_matrix_df["p_ij"] = (interaction_matrix_df["U_ij"]) / (interaction_matrix_df["U_i"])

        interaction_matrix_df = interaction_matrix_df.drop(columns=["U_i"])

        self.interaction_matrix_df = interaction_matrix_df

        return self

    def flows(self):
        """Compute expected flows E_ij = p_ij * C_i.

        Computes probabilities first if p_ij is still empty. Returns self.
        """

        interaction_matrix_df = self.interaction_matrix_df

        if interaction_matrix_df["C_i"].isna().all():
            raise ValueError ("Market size column in customer origins not defined. Use CustomerOrigins.define_marketsize()")

        if interaction_matrix_df["p_ij"].isna().all():
            self.probabilities()
            interaction_matrix_df = self.interaction_matrix_df

        interaction_matrix_df["E_ij"] = interaction_matrix_df["p_ij"] * interaction_matrix_df["C_i"]

        self.interaction_matrix_df = interaction_matrix_df

        return self

    def marketareas(self):
        """Aggregate total market areas T_j = sum_i(E_ij) per destination j.

        Stores the result in market_areas_df and returns self.
        """

        interaction_matrix_df = self.interaction_matrix_df

        market_areas_df = pd.DataFrame(interaction_matrix_df.groupby("j")["E_ij"].sum())
        market_areas_df = market_areas_df.reset_index(drop=False)
        market_areas_df = market_areas_df.rename(columns={"E_ij": "T_j"})

        print(market_areas_df)

        self.market_areas_df = market_areas_df

        return self
|
426
|
+
|
427
|
+
def load_geodata (
    file,
    location_type: str,
    unique_id: str,
    x_col: str = None,
    y_col: str = None,
    data_type = "shp",
    csv_sep = ";",
    csv_decimal = ",",
    csv_encoding="unicode_escape",
    crs_input = "EPSG:4326"
    ):
    """Load point geodata and wrap it as CustomerOrigins or SupplyLocations.

    Parameters
    ----------
    file : path-like
        Input file (shapefile, CSV or XLSX).
    location_type : str
        "origins" -> CustomerOrigins, "destinations" -> SupplyLocations.
    unique_id : str
        Column holding the unique location identifier.
    x_col, y_col : str, optional
        Coordinate columns; required for CSV/XLSX input.
    data_type : str
        One of "shp", "csv", "xlsx".
    crs_input : str
        CRS of CSV/XLSX coordinates; ignored for shapefiles (the file's
        own CRS is used).

    Returns
    -------
    CustomerOrigins or SupplyLocations
    """

    # BUGFIX: the error message previously named the wrong accepted values
    # ('loc'/'dest') although only 'origins'/'destinations' are valid.
    if location_type is None or location_type not in ("origins", "destinations"):
        raise ValueError ("location_type must be either 'origins' or 'destinations'")

    if data_type not in ["shp", "csv", "xlsx"]:
        raise ValueError ("data_type must be 'shp', 'csv' or 'xlsx'")

    if data_type == "shp":
        geodata_gpd_original = gp.read_file(file)
        # Shapefiles carry their own CRS; it overrides the default input CRS.
        crs_input = geodata_gpd_original.crs

    if data_type == "csv" or data_type == "xlsx":
        if x_col is None:
            raise ValueError ("Missing value for X coordinate column")
        if y_col is None:
            raise ValueError ("Missing value for Y coordinate column")

        if data_type == "csv":
            geodata_tab = pd.read_csv(
                file,
                sep = csv_sep,
                decimal = csv_decimal,
                encoding = csv_encoding
                )

        if data_type == "xlsx":
            geodata_tab = pd.read_excel(file)

        geodata_gpd_original = gp.GeoDataFrame(
            geodata_tab,
            geometry = gp.points_from_xy(
                geodata_tab[x_col],
                geodata_tab[y_col]
                ),
            crs = crs_input
            )

    # Downstream routing (ORS) expects WGS84 coordinates.
    crs_output = "EPSG:4326"
    geodata_gpd = geodata_gpd_original.to_crs(crs_output)
    geodata_gpd = geodata_gpd[[unique_id, "geometry"]]

    metadata = {
        "location_type": location_type,
        "unique_id": unique_id,
        "attraction_col": [None],
        "marketsize_col": None,
        "weighting": {
            0: {
                "func": None,
                "param": None
                }
            },
        "crs_input": crs_input,
        "crs_output": crs_output,
        "no_points": len(geodata_gpd)
        }

    if location_type == "origins":
        geodata_object = CustomerOrigins(
            geodata_gpd,
            geodata_gpd_original,
            metadata
            )
    else:
        geodata_object = SupplyLocations(
            geodata_gpd,
            geodata_gpd_original,
            metadata
            )

    return geodata_object
|
511
|
+
|
512
|
+
def create_interaction_matrix(
    customer_origins,
    supply_locations
    ):
    """Build an InteractionMatrix from customer origins and supply locations.

    Cross-joins the two location sets into a long-format OD table with the
    columns "i", "C_i", "i_coords", "j", "A_j", "j_coords", "ij" and empty
    placeholders for t_ij/U_ij/p_ij/E_ij.

    Raises ValueError if the inputs are of the wrong type, or if the market
    size / attraction columns have not been defined yet.
    """

    if not isinstance(customer_origins, CustomerOrigins):
        raise ValueError ("customer_origins must be of class CustomerOrigins")
    if not isinstance(supply_locations, SupplyLocations):
        # BUGFIX: message previously named 'supply_locations' instead of the class.
        raise ValueError ("supply_locations must be of class SupplyLocations")

    customer_origins_metadata = customer_origins.get_metadata()
    if customer_origins_metadata["marketsize_col"] is None:
        raise ValueError("Market size column in customer origins not defined. Use CustomerOrigins.define_marketsize()")

    supply_locations_metadata = supply_locations.get_metadata()
    if supply_locations_metadata["attraction_col"][0] is None:
        raise ValueError("Attraction column in supply locations not defined. Use SupplyLocations.define_attraction()")

    customer_origins_unique_id = customer_origins_metadata["unique_id"]
    customer_origins_marketsize = customer_origins_metadata["marketsize_col"]

    # Join the market size column onto the reduced point layer, then
    # normalise column names to the i/C_i convention.
    customer_origins_geodata_gpd = pd.DataFrame(customer_origins.get_geodata_gpd())
    customer_origins_geodata_gpd_original = pd.DataFrame(customer_origins.get_geodata_gpd_original())
    customer_origins_data = pd.merge(
        customer_origins_geodata_gpd,
        customer_origins_geodata_gpd_original[[customer_origins_unique_id, customer_origins_marketsize]],
        left_on = customer_origins_unique_id,
        right_on = customer_origins_unique_id
        )
    customer_origins_data = customer_origins_data.rename(columns = {
        customer_origins_unique_id: "i",
        customer_origins_marketsize: "C_i",
        "geometry": "i_coords"
        }
        )

    supply_locations_unique_id = supply_locations_metadata["unique_id"]
    supply_locations_attraction = supply_locations_metadata["attraction_col"][0]

    # Same join for the supply side, normalised to the j/A_j convention.
    supply_locations_geodata_gpd = pd.DataFrame(supply_locations.get_geodata_gpd())
    supply_locations_geodata_gpd_original = pd.DataFrame(supply_locations.get_geodata_gpd_original())
    supply_locations_data = pd.merge(
        supply_locations_geodata_gpd,
        supply_locations_geodata_gpd_original[[supply_locations_unique_id, supply_locations_attraction]],
        left_on = supply_locations_unique_id,
        right_on = supply_locations_unique_id
        )
    supply_locations_data = supply_locations_data.rename(columns = {
        supply_locations_unique_id: "j",
        supply_locations_attraction: "A_j",
        "geometry": "j_coords"
        }
        )

    # Cross join: every origin paired with every destination.
    interaction_matrix_df = customer_origins_data.merge(
        supply_locations_data,
        how = "cross"
        )
    interaction_matrix_df["ij"] = interaction_matrix_df["i"].astype(str)+"_"+interaction_matrix_df["j"].astype(str)
    # Placeholders; filled later by InteractionMatrix methods.
    interaction_matrix_df["t_ij"] = None
    interaction_matrix_df["U_ij"] = None
    interaction_matrix_df["p_ij"] = None
    interaction_matrix_df["E_ij"] = None

    market_areas_df = None

    interaction_matrix = InteractionMatrix(
        interaction_matrix_df,
        market_areas_df,
        customer_origins,
        supply_locations
        )

    return interaction_matrix
|
huff/ors.py
ADDED
@@ -0,0 +1,334 @@
|
|
1
|
+
#-----------------------------------------------------------------------
|
2
|
+
# Name: ors (huff package)
|
3
|
+
# Purpose: OpenRouteService client
|
4
|
+
# Author: Thomas Wieland
|
5
|
+
# ORCID: 0000-0001-5168-9846
|
6
|
+
# mail: geowieland@googlemail.com
|
7
|
+
# Version: 1.0.0
|
8
|
+
# Last update: 2025-04-25 18:14
|
9
|
+
# Copyright (c) 2025 Thomas Wieland
|
10
|
+
#-----------------------------------------------------------------------
|
11
|
+
|
12
|
+
|
13
|
+
import pandas as pd
|
14
|
+
import requests
|
15
|
+
import geopandas as gp
|
16
|
+
from shapely.geometry import Point, MultiPoint, shape, Polygon
|
17
|
+
from shapely.ops import unary_union
|
18
|
+
|
19
|
+
|
20
|
+
class Isochrone:
    """Result wrapper for an OpenRouteService isochrones request."""

    def __init__(
        self,
        isochrones_gdf,
        metadata,
        status_code,
        save_config
        ):

        # GeoDataFrame of isochrone polygons (None on request failure),
        # raw ORS response metadata, HTTP status code, and output config.
        self.isochrones_gdf = isochrones_gdf
        self.metadata = metadata
        self.status_code = status_code
        self.save_config = save_config

    def summary(self):
        """Print a short description of the isochrone query and response."""

        metadata = self.metadata
        status_code = self.status_code

        # Renamed the loop variable: the original shadowed the builtin `range`.
        range_str = [str(seg) for seg in metadata["query"]["range"]]
        profile = metadata["query"]["profile"]
        range_type = metadata["query"]["range_type"]
        no_locations = len(metadata["query"]["locations"])

        print("Locations: " + str(no_locations))
        print("Segments: " + ", ".join(range_str))
        print("Range type: " + range_type)
        print("Profile: " + profile)
        print("Status code: " + str(status_code))
|
50
|
+
|
51
|
+
|
52
|
+
class TimeDistanceMatrix:
    """Result wrapper for an OpenRouteService matrix request."""

    def __init__(
        self,
        matrix_df,
        metadata,
        status_code,
        save_config
        ):

        # Long-format source/destination cost DataFrame (None on failure),
        # raw ORS response metadata, HTTP status code, and output config.
        self.matrix_df = matrix_df
        self.metadata = metadata
        self.status_code = status_code
        self.save_config = save_config

    def get_matrix(self):
        """Return the matrix DataFrame."""
        return self.matrix_df

    def get_metadata(self):
        """Return the raw ORS response metadata."""
        return self.metadata

    def get_config(self):
        """Return the save/output configuration dict."""
        return self.save_config

    def summary(self):
        """Print a summary of the matrix query.

        Not implemented yet; intentionally a no-op. (Removed the unused
        local bindings the original left behind.)
        """
        # TODO: print locations/metrics/status analogous to Isochrone.summary()
        pass
|
85
|
+
|
86
|
+
|
87
|
+
def isochrone(
    auth: str,
    locations: list,
    id: list = [],
    segments: list = [900, 600, 300],
    range_type: str = "time",
    intersections: str = "true",
    profile: str = "driving-car",
    save_output: bool = True,
    output_filepath: str = "isochrones.shp",
    output_crs: str = "EPSG:2056",
    verbose: bool = True
    ):
    """Query the OpenRouteService isochrones endpoint.

    Parameters mirror the ORS API: *auth* is the API key, *locations* a
    list of [lon, lat] pairs, *segments* the range values, *range_type*
    "time" or "distance". (*id* is currently unused; kept for interface
    compatibility.)

    Returns
    -------
    Isochrone
        On success holds the isochrone polygons (reprojected to
        *output_crs*); on failure its gdf/metadata are None and
        status_code is the HTTP code (or 99999 for connection errors).
    """

    ors_url = "https://api.openrouteservice.org/v2/isochrones/" + profile

    headers = {
        "Content-Type": "application/json; charset=utf-8",
        "Accept": "application/json, application/geo+json, application/gpx+xml, img/png; charset=utf-8",
        "Authorization": auth
        }

    body = {
        "locations": locations,
        "range": segments,
        "intersections": intersections,
        "range_type": range_type
        }

    save_config = {
        "range_type": range_type,
        "save_output": save_output,
        "output_filepath" : output_filepath,
        "output_crs": output_crs
        }

    try:
        response = requests.post(
            ors_url,
            headers=headers,
            json=body
            )
    except Exception:
        if verbose is True:
            print ("Unknown error while accessing ORS server")
        # BUGFIX: the original fell through after the except block and
        # dereferenced the undefined `response`, raising NameError instead
        # of reporting the failure. Return the failure object directly.
        return Isochrone(None, None, 99999, save_config)

    status_code = response.status_code

    if status_code == 200:

        if verbose is True:
            print ("Accessing ORS server successful")

        response_json = response.json()

        metadata = response_json["metadata"]

        features = response_json["features"]
        geometries = [shape(feature["geometry"]) for feature in features]

        isochrones_gdf = gp.GeoDataFrame(
            features,
            geometry=geometries,
            crs="EPSG:4326"
            )

        # Pull each feature's range value out of its "properties" dict
        # into a flat "segment" column.
        isochrones_gdf["segment"] = 0
        isochrones_gdf_properties_dict = dict(isochrones_gdf["properties"])

        for i in range(len(isochrones_gdf_properties_dict)):
            # NOTE(review): column position 3 is assumed to be "segment"
            # given the feature keys + geometry layout — confirm if the
            # ORS response schema changes.
            isochrones_gdf.iloc[i,3] = isochrones_gdf_properties_dict[i]["value"]

        isochrones_gdf = isochrones_gdf.drop(columns=["properties"])
        isochrones_gdf = isochrones_gdf.to_crs(output_crs)

        if save_output:
            isochrones_gdf.to_file(output_filepath)
            print ("Saved as", output_filepath)

    else:

        if verbose is True:
            print ("Error while accessing ORS server. Status Code: " + str(status_code))

        isochrones_gdf = None
        metadata = None

    isochrone_output = Isochrone(
        isochrones_gdf,
        metadata,
        status_code,
        save_config
        )

    return isochrone_output
|
189
|
+
|
190
|
+
def matrix(
    auth,
    locations: list,
    sources: list = None,
    destinations: list = None,
    id: str = None,
    range_type="time",
    metrics: list = None,
    resolve_locations: bool = False,
    units: str = "mi",
    save_output=False,
    output_filepath="matrix.csv",
    csv_sep=";",
    csv_decimal=",",
    csv_encoding=None,
    verbose=True
):
    """Query the OpenRouteService time/distance matrix endpoint.

    Parameters
    ----------
    auth : str
        ORS API key, sent verbatim in the Authorization header.
    locations : list
        List of [lon, lat] coordinate pairs.
    sources : list, optional
        Indices into ``locations`` to use as sources (all when empty).
    destinations : list, optional
        Indices into ``locations`` to use as destinations (all when empty).
    id : str, optional
        Request identifier forwarded to ORS. (Name kept for backward
        compatibility although it shadows the builtin.)
    range_type : str
        Column name for the per-pair value in the result ("time"/"distance").
    metrics : list, optional
        ORS metrics to request (e.g. ["duration", "distance"]).
    resolve_locations : bool
        Forwarded to ORS as ``resolve_locations``.
    units : str
        Unit parameter forwarded to ORS (skipped when None).
    save_output : bool
        When True, write the matrix to CSV at ``output_filepath``.
    output_filepath, csv_sep, csv_decimal, csv_encoding
        CSV output options passed to ``DataFrame.to_csv``.
    verbose : bool
        Print progress and error messages.

    Returns
    -------
    TimeDistanceMatrix
        Wraps the matrix dataframe, response metadata, HTTP status code
        and the save configuration. On failure the dataframe and metadata
        are None (status code 99999 for transport-level errors).
    """

    ors_url = "https://api.openrouteservice.org/v2/matrix/driving-car"

    headers = {
        "Content-Type": "application/json; charset=utf-8",
        "Accept": "application/json, application/geo+json, application/gpx+xml, img/png; charset=utf-8",
        "Authorization": auth
    }

    body = {
        "locations": locations,
        "resolve_locations": resolve_locations
    }
    if id is not None:
        body["id"] = id
    if metrics:
        body["metrics"] = metrics
    if sources:
        body["sources"] = sources
    if destinations:
        body["destinations"] = destinations
    if units is not None:
        body["units"] = units

    # Built up-front so every exit path can hand it to TimeDistanceMatrix.
    save_config = {
        "range_type": range_type,
        "save_output": save_output,
        "output_filepath": output_filepath
    }

    try:
        response = requests.post(
            ors_url,
            headers=headers,
            json=body
        )
    except Exception:
        # Bug fix: the original code fell through after this handler and
        # then read response.status_code, raising NameError because
        # `response` was never bound. Return a failure result instead.
        if verbose:
            print("Unknown error while accessing ORS server")
        return TimeDistanceMatrix(None, None, 99999, save_config)

    status_code = response.status_code

    if status_code == 200:

        if verbose:
            print("Accessing ORS server successful")

        response_json = response.json()
        metadata = response_json["metadata"]

        columns = [
            "source",
            "source_lat",
            "source_lon",
            "source_snapped_distance",
            "destination",
            "destination_lat",
            "destination_lon",
            "destination_snapped_distance",
            "source_destination",
            range_type
        ]

        # NOTE(review): ORS returns "durations" only when the duration
        # metric is requested; with metrics=["distance"] alone this key
        # is absent -- confirm against the ORS matrix API reference.
        rows = []
        for i, durations_row in enumerate(response_json["durations"]):

            source = response_json["sources"][i]
            source_lat = source["location"][1]
            source_lon = source["location"][0]
            source_snapped_distance = source["snapped_distance"]

            for j, entry in enumerate(durations_row):

                destination = response_json["destinations"][j]
                rows.append({
                    "source": str(i),
                    "source_lat": source_lat,
                    "source_lon": source_lon,
                    "source_snapped_distance": source_snapped_distance,
                    "destination": str(j),
                    "destination_lat": destination["location"][1],
                    "destination_lon": destination["location"][0],
                    "destination_snapped_distance": destination["snapped_distance"],
                    "source_destination": str(i) + "_" + str(j),
                    range_type: entry
                })

        # Build the frame once instead of pd.concat per row (quadratic).
        matrix_df = pd.DataFrame(rows, columns=columns)

        if save_output:
            matrix_df.to_csv(
                output_filepath,
                decimal=csv_decimal,
                sep=csv_sep,
                encoding=csv_encoding
            )
            print("Saved as", output_filepath)

    else:

        if verbose:
            print("Error in accessing ORS server. Status Code: " + str(status_code))

        matrix_df = None
        metadata = None

    matrix_output = TimeDistanceMatrix(
        matrix_df,
        metadata,
        status_code,
        save_config
    )

    return matrix_output
|
huff/tests/__init__.py
ADDED
File without changes
|
@@ -0,0 +1 @@
|
|
1
|
+
UTF-8
|
Binary file
|
@@ -0,0 +1 @@
|
|
1
|
+
PROJCS["DHDN_3_Degree_Gauss_Zone_3",GEOGCS["GCS_Deutsches_Hauptdreiecksnetz",DATUM["D_Deutsches_Hauptdreiecksnetz",SPHEROID["Bessel_1841",6377397.155,299.1528128]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Gauss_Kruger"],PARAMETER["False_Easting",3500000.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",9.0],PARAMETER["Scale_Factor",1.0],PARAMETER["Latitude_Of_Origin",0.0],UNIT["Meter",1.0]]
|
@@ -0,0 +1,43 @@
|
|
1
|
+
<!DOCTYPE qgis PUBLIC 'http://mrcc.com/qgis.dtd' 'SYSTEM'>
|
2
|
+
<qgis version="3.28.3-Firenze">
|
3
|
+
<identifier></identifier>
|
4
|
+
<parentidentifier></parentidentifier>
|
5
|
+
<language></language>
|
6
|
+
<type>dataset</type>
|
7
|
+
<title></title>
|
8
|
+
<abstract></abstract>
|
9
|
+
<contact>
|
10
|
+
<name></name>
|
11
|
+
<organization></organization>
|
12
|
+
<position></position>
|
13
|
+
<voice></voice>
|
14
|
+
<fax></fax>
|
15
|
+
<email></email>
|
16
|
+
<role></role>
|
17
|
+
</contact>
|
18
|
+
<links/>
|
19
|
+
<fees></fees>
|
20
|
+
<encoding></encoding>
|
21
|
+
<crs>
|
22
|
+
<spatialrefsys nativeFormat="Wkt">
|
23
|
+
<wkt>PROJCRS["DHDN / 3-degree Gauss-Kruger zone 3",BASEGEOGCRS["DHDN",DATUM["Deutsches Hauptdreiecksnetz",ELLIPSOID["Bessel 1841",6377397.155,299.1528128,LENGTHUNIT["metre",1]]],PRIMEM["Greenwich",0,ANGLEUNIT["degree",0.0174532925199433]],ID["EPSG",4314]],CONVERSION["3-degree Gauss-Kruger zone 3",METHOD["Transverse Mercator",ID["EPSG",9807]],PARAMETER["Latitude of natural origin",0,ANGLEUNIT["degree",0.0174532925199433],ID["EPSG",8801]],PARAMETER["Longitude of natural origin",9,ANGLEUNIT["degree",0.0174532925199433],ID["EPSG",8802]],PARAMETER["Scale factor at natural origin",1,SCALEUNIT["unity",1],ID["EPSG",8805]],PARAMETER["False easting",3500000,LENGTHUNIT["metre",1],ID["EPSG",8806]],PARAMETER["False northing",0,LENGTHUNIT["metre",1],ID["EPSG",8807]]],CS[Cartesian,2],AXIS["northing (X)",north,ORDER[1],LENGTHUNIT["metre",1]],AXIS["easting (Y)",east,ORDER[2],LENGTHUNIT["metre",1]],USAGE[SCOPE["Cadastre, engineering survey, topographic mapping."],AREA["Germany - former West Germany onshore between 7°30'E and 10°30'E - states of Baden-Wurtemberg, Bayern, Bremen, Hamberg, Hessen, Niedersachsen, Nordrhein-Westfalen, Rhineland-Pfalz, Schleswig-Holstein."],BBOX[47.27,7.5,55.09,10.51]],ID["EPSG",31467]]</wkt>
|
24
|
+
<proj4>+proj=tmerc +lat_0=0 +lon_0=9 +k=1 +x_0=3500000 +y_0=0 +ellps=bessel +towgs84=598.1,73.7,418.2,0.202,0.045,-2.455,6.7 +units=m +no_defs</proj4>
|
25
|
+
<srsid>2647</srsid>
|
26
|
+
<srid>31467</srid>
|
27
|
+
<authid>EPSG:31467</authid>
|
28
|
+
<description>DHDN / 3-degree Gauss-Kruger zone 3</description>
|
29
|
+
<projectionacronym>tmerc</projectionacronym>
|
30
|
+
<ellipsoidacronym>EPSG:7004</ellipsoidacronym>
|
31
|
+
<geographicflag>false</geographicflag>
|
32
|
+
</spatialrefsys>
|
33
|
+
</crs>
|
34
|
+
<extent>
|
35
|
+
<spatial maxy="0" dimensions="2" maxx="0" maxz="0" crs="EPSG:31467" miny="0" minz="0" minx="0"/>
|
36
|
+
<temporal>
|
37
|
+
<period>
|
38
|
+
<start></start>
|
39
|
+
<end></end>
|
40
|
+
</period>
|
41
|
+
</temporal>
|
42
|
+
</extent>
|
43
|
+
</qgis>
|
Binary file
|
Binary file
|
@@ -0,0 +1 @@
|
|
1
|
+
UTF-8
|
Binary file
|
@@ -0,0 +1 @@
|
|
1
|
+
PROJCS["DHDN_3_Degree_Gauss_Zone_3",GEOGCS["GCS_Deutsches_Hauptdreiecksnetz",DATUM["D_Deutsches_Hauptdreiecksnetz",SPHEROID["Bessel_1841",6377397.155,299.1528128]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Gauss_Kruger"],PARAMETER["False_Easting",3500000.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",9.0],PARAMETER["Scale_Factor",1.0],PARAMETER["Latitude_Of_Origin",0.0],UNIT["Meter",1.0]]
|
@@ -0,0 +1,43 @@
|
|
1
|
+
<!DOCTYPE qgis PUBLIC 'http://mrcc.com/qgis.dtd' 'SYSTEM'>
|
2
|
+
<qgis version="3.28.3-Firenze">
|
3
|
+
<identifier></identifier>
|
4
|
+
<parentidentifier></parentidentifier>
|
5
|
+
<language></language>
|
6
|
+
<type>dataset</type>
|
7
|
+
<title></title>
|
8
|
+
<abstract></abstract>
|
9
|
+
<contact>
|
10
|
+
<name></name>
|
11
|
+
<organization></organization>
|
12
|
+
<position></position>
|
13
|
+
<voice></voice>
|
14
|
+
<fax></fax>
|
15
|
+
<email></email>
|
16
|
+
<role></role>
|
17
|
+
</contact>
|
18
|
+
<links/>
|
19
|
+
<fees></fees>
|
20
|
+
<encoding></encoding>
|
21
|
+
<crs>
|
22
|
+
<spatialrefsys nativeFormat="Wkt">
|
23
|
+
<wkt>PROJCRS["DHDN / 3-degree Gauss-Kruger zone 3",BASEGEOGCRS["DHDN",DATUM["Deutsches Hauptdreiecksnetz",ELLIPSOID["Bessel 1841",6377397.155,299.1528128,LENGTHUNIT["metre",1]]],PRIMEM["Greenwich",0,ANGLEUNIT["degree",0.0174532925199433]],ID["EPSG",4314]],CONVERSION["3-degree Gauss-Kruger zone 3",METHOD["Transverse Mercator",ID["EPSG",9807]],PARAMETER["Latitude of natural origin",0,ANGLEUNIT["degree",0.0174532925199433],ID["EPSG",8801]],PARAMETER["Longitude of natural origin",9,ANGLEUNIT["degree",0.0174532925199433],ID["EPSG",8802]],PARAMETER["Scale factor at natural origin",1,SCALEUNIT["unity",1],ID["EPSG",8805]],PARAMETER["False easting",3500000,LENGTHUNIT["metre",1],ID["EPSG",8806]],PARAMETER["False northing",0,LENGTHUNIT["metre",1],ID["EPSG",8807]]],CS[Cartesian,2],AXIS["northing (X)",north,ORDER[1],LENGTHUNIT["metre",1]],AXIS["easting (Y)",east,ORDER[2],LENGTHUNIT["metre",1]],USAGE[SCOPE["Cadastre, engineering survey, topographic mapping."],AREA["Germany - former West Germany onshore between 7°30'E and 10°30'E - states of Baden-Wurtemberg, Bayern, Bremen, Hamberg, Hessen, Niedersachsen, Nordrhein-Westfalen, Rhineland-Pfalz, Schleswig-Holstein."],BBOX[47.27,7.5,55.09,10.51]],ID["EPSG",31467]]</wkt>
|
24
|
+
<proj4>+proj=tmerc +lat_0=0 +lon_0=9 +k=1 +x_0=3500000 +y_0=0 +ellps=bessel +towgs84=598.1,73.7,418.2,0.202,0.045,-2.455,6.7 +units=m +no_defs</proj4>
|
25
|
+
<srsid>2647</srsid>
|
26
|
+
<srid>31467</srid>
|
27
|
+
<authid>EPSG:31467</authid>
|
28
|
+
<description>DHDN / 3-degree Gauss-Kruger zone 3</description>
|
29
|
+
<projectionacronym>tmerc</projectionacronym>
|
30
|
+
<ellipsoidacronym>EPSG:7004</ellipsoidacronym>
|
31
|
+
<geographicflag>false</geographicflag>
|
32
|
+
</spatialrefsys>
|
33
|
+
</crs>
|
34
|
+
<extent>
|
35
|
+
<spatial maxy="0" dimensions="2" maxx="0" maxz="0" crs="EPSG:31467" miny="0" minz="0" minx="0"/>
|
36
|
+
<temporal>
|
37
|
+
<period>
|
38
|
+
<start></start>
|
39
|
+
<end></end>
|
40
|
+
</period>
|
41
|
+
</temporal>
|
42
|
+
</extent>
|
43
|
+
</qgis>
|
Binary file
|
Binary file
|
huff/tests/tests_huff.py
ADDED
@@ -0,0 +1,89 @@
|
|
1
|
+
#-----------------------------------------------------------------------
# Name: tests_huff (huff package)
# Purpose: Tests for Huff Model package functions
# Author: Thomas Wieland
# ORCID: 0000-0001-5168-9846
# mail: geowieland@googlemail.com
# Version: 1.0.0
# Last update: 2025-04-25 18:08
# Copyright (c) 2025 Thomas Wieland
#-----------------------------------------------------------------------


from ..ors import isochrone, matrix
from ..models import load_geodata, create_interaction_matrix


# Isochrones test:
# NOTE(review): the ORS API key below is hard-coded and shipped with the
# package -- it should be rotated and read from an environment variable.
# These "tests" are a live script (network access required), not pytest
# cases; they exercise the public API end to end.

output_path = "."

# Request isochrones for two [lon, lat] points and write them to a shapefile.
isochrone_ORS = isochrone (
    auth = "5b3ce3597851110001cf62480a15aafdb5a64f4d91805929f8af6abd",
    locations =[[7.593301534652711, 47.54329763735186], [9.207916,49.153868]],
    save_output = True,
    output_filepath = "test.shp",
    intersections="false"
    )

isochrone_ORS.summary()

# Matrix test:

# Request a travel-time matrix for four [lon, lat] points; saved as CSV.
matrix_ORS = matrix(auth="5b3ce3597851110001cf62480a15aafdb5a64f4d91805929f8af6abd",
    locations=[[9.70093,48.477473],[9.207916,49.153868],[37.573242,55.801281],[115.663757,38.106467]],
    save_output=True,
    output_filepath="testmatrix.csv"
    )

print(matrix_ORS)


# Huff model test data:

# Load customer origins from the bundled shapefile; "BEZEICHN" is the
# unique identifier column.
Haslach = load_geodata(
    "huff/tests/data/Haslach.shp",
    location_type="origins",
    unique_id="BEZEICHN"
    )

Haslach.summary()

# Use the "pop" column as the market size of each origin.
Haslach.define_marketsize("pop")

# No arguments: relies on the method's default weighting parameters.
Haslach.define_transportcosts_weighting()

Haslach.summary()


# Load destinations from the bundled shapefile; "LFDNR" is the
# unique identifier column.
Haslach_supermarkets = load_geodata(
    "huff/tests/data/Haslach_supermarkets.shp",
    location_type="destinations",
    unique_id="LFDNR"
    )

Haslach_supermarkets.summary()

# "VKF_qm" (presumably sales floor area in sqm -- confirm with data docs)
# serves as the attraction variable.
Haslach_supermarkets.define_attraction("VKF_qm")

# No arguments: relies on the method's default weighting parameters.
Haslach_supermarkets.define_attraction_weighting()

Haslach_supermarkets.summary()


# Build the origin-destination interaction matrix for the Huff model.
haslach_interactionmatrix = create_interaction_matrix(
    Haslach,
    Haslach_supermarkets
    )

# Fill in real travel costs via ORS (network access required).
interaction_matrix = haslach_interactionmatrix.transport_costs(
    ors_auth="5b3ce3597851110001cf62480a15aafdb5a64f4d91805929f8af6abd"
    )

# Compute probabilistic customer flows, export, and aggregate market areas.
interaction_matrix = interaction_matrix.flows()

interaction_matrix.get_interaction_matrix_df().to_excel("interaction_matrix.xlsx")

interaction_matrix.marketareas()

interaction_matrix.summary()
|
@@ -0,0 +1,32 @@
|
|
1
|
+
Metadata-Version: 2.2
|
2
|
+
Name: huff
|
3
|
+
Version: 1.0.0
|
4
|
+
Summary: huff: Huff Model Market Area Analysis
|
5
|
+
Author: Thomas Wieland
|
6
|
+
Author-email: geowieland@googlemail.com
|
7
|
+
Description-Content-Type: text/markdown
|
8
|
+
Requires-Dist: geopandas
|
9
|
+
Requires-Dist: pandas
|
10
|
+
Requires-Dist: numpy
|
11
|
+
Dynamic: author
|
12
|
+
Dynamic: author-email
|
13
|
+
Dynamic: description
|
14
|
+
Dynamic: description-content-type
|
15
|
+
Dynamic: requires-dist
|
16
|
+
Dynamic: summary
|
17
|
+
|
18
|
+
# huff: Huff Model Market Area Analysis
|
19
|
+
|
20
|
+
## Author
|
21
|
+
|
22
|
+
Thomas Wieland [ORCID](https://orcid.org/0000-0001-5168-9846) [EMail](mailto:geowieland@googlemail.com)
|
23
|
+
|
24
|
+
See the /tests directory for usage examples of most of the included functions.
|
25
|
+
|
26
|
+
|
27
|
+
## Installation
|
28
|
+
|
29
|
+
To install the package, use `pip`:
|
30
|
+
|
31
|
+
```bash
|
32
|
+
pip install huff
|
@@ -0,0 +1,22 @@
|
|
1
|
+
huff/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
2
|
+
huff/gistools.py,sha256=pyNYiRG_PwjiVlOLY26zPDIGVHE79uwyz48kGQ4k9us,1625
|
3
|
+
huff/models.py,sha256=UxFo2laIrNahWrcf-_4xVMO9eR_w7kIBeoG_w_eGD70,21476
|
4
|
+
huff/ors.py,sha256=TF1US2Tc1L4ny6-S4JFIkDzgxxrtFsD_N9A-FQBabLM,9397
|
5
|
+
huff/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
6
|
+
huff/tests/tests_huff.py,sha256=6L16suvmcYtMBmgppHnhqJSCr6OI1YJh1Bh114ElfUc,2294
|
7
|
+
huff/tests/data/Haslach.cpg,sha256=OtMDH1UDpEBK-CUmLugjLMBNTqZoPULF3QovKiesmCQ,5
|
8
|
+
huff/tests/data/Haslach.dbf,sha256=GVPIt05OzDO7UrRDcsMhiYWvyXAPg6Z-qkiysFzj-fc,506
|
9
|
+
huff/tests/data/Haslach.prj,sha256=2Jy1Vlzh7UxQ1MXpZ9UYLs2SxfrObj2xkEkZyLqmGTY,437
|
10
|
+
huff/tests/data/Haslach.qmd,sha256=j9i4_Pz7ZMSG2UDSb3nuhJpw0KWXIRhiiDymqJP6_Fo,2479
|
11
|
+
huff/tests/data/Haslach.shp,sha256=s7ks-ukOIKMJCD5x6m0MO6pwkg1USvhudQKTg74ib1E,212
|
12
|
+
huff/tests/data/Haslach.shx,sha256=VEMghRPP_HUYIuGoxR7X0eHQe9LnO4s8JP4twfzKyyk,132
|
13
|
+
huff/tests/data/Haslach_supermarkets.cpg,sha256=OtMDH1UDpEBK-CUmLugjLMBNTqZoPULF3QovKiesmCQ,5
|
14
|
+
huff/tests/data/Haslach_supermarkets.dbf,sha256=4fTBxntDvQ8qFPdGK82ywJd2Xq_9nApDyi3h5_KPFSc,21282
|
15
|
+
huff/tests/data/Haslach_supermarkets.prj,sha256=2Jy1Vlzh7UxQ1MXpZ9UYLs2SxfrObj2xkEkZyLqmGTY,437
|
16
|
+
huff/tests/data/Haslach_supermarkets.qmd,sha256=j9i4_Pz7ZMSG2UDSb3nuhJpw0KWXIRhiiDymqJP6_Fo,2479
|
17
|
+
huff/tests/data/Haslach_supermarkets.shp,sha256=X7QbQ0BTMag_B-bDRbpr-go2BQIXo3Y8zMAKpYZmlps,324
|
18
|
+
huff/tests/data/Haslach_supermarkets.shx,sha256=j23QHX-SmdAeN04rw0x8nUOran-OCg_T6r_LvzzEPWs,164
|
19
|
+
huff-1.0.0.dist-info/METADATA,sha256=Xl1URHFyO5OqALQcLsBtENr-TNO5cm0b7xo8479hOyg,749
|
20
|
+
huff-1.0.0.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
|
21
|
+
huff-1.0.0.dist-info/top_level.txt,sha256=nlzX-PxZNFmIxANIJMySuIFPihd6qOBkRlhIC28NEsQ,5
|
22
|
+
huff-1.0.0.dist-info/RECORD,,
|
@@ -0,0 +1 @@
|
|
1
|
+
huff
|