huff 1.0.0__py3-none-any.whl → 1.1.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
huff/gistools.py CHANGED
@@ -4,8 +4,8 @@
  # Author: Thomas Wieland
  # ORCID: 0000-0001-5168-9846
  # mail: geowieland@googlemail.com
- # Version: 1.0.0
- # Last update: 2025-04-25 18:16
+ # Version: 1.1.0
+ # Last update: 2025-04-28 19:36
  # Copyright (c) 2025 Thomas Wieland
  #-----------------------------------------------------------------------

huff/models.py CHANGED
@@ -4,8 +4,8 @@
  # Author: Thomas Wieland
  # ORCID: 0000-0001-5168-9846
  # mail: geowieland@googlemail.com
- # Version: 1.0.0
- # Last update: 2025-04-25 18:13
+ # Version: 1.1.0
+ # Last update: 2025-04-28 19:35
  # Copyright (c) 2025 Thomas Wieland
  #-----------------------------------------------------------------------

@@ -13,7 +13,7 @@
  import pandas as pd
  import geopandas as gp
  import numpy as np
- from .ors import matrix
+ from huff.ors import Client


  class CustomerOrigins:
@@ -198,28 +198,54 @@ class SupplyLocations:
  self.metadata = metadata

  return self
+
+ def add_new_destinations(
+ self,
+ new_destinations,
+ ):
+
+ geodata_gpd_original = self.get_geodata_gpd_original()
+ geodata_gpd = self.get_geodata_gpd()
+ metadata = self.get_metadata()
+
+ new_destinations_gpd_original = new_destinations.get_geodata_gpd_original()
+ new_destinations_gpd = new_destinations.get_geodata_gpd()
+ new_destinations_metadata = new_destinations.get_metadata()
+
+ geodata_gpd_original = geodata_gpd_original.append(
+ new_destinations_gpd_original,
+ ignore_index=True
+ )
+
+ geodata_gpd = geodata_gpd.append(
+ new_destinations_gpd,
+ ignore_index=True
+ )
+
+ metadata["no_points"] = metadata["no_points"]+new_destinations_metadata["no_points"]
+
+ self.geodata_gpd = geodata_gpd
+ self.geodata_gpd_original = geodata_gpd_original
+ self.metadata = metadata
+
+ return self

  class InteractionMatrix:

  def __init__(
  self,
  interaction_matrix_df,
- market_areas_df,
  customer_origins,
  supply_locations
  ):

  self.interaction_matrix_df = interaction_matrix_df
- self.market_areas_df = market_areas_df
  self.customer_origins = customer_origins
  self.supply_locations = supply_locations

  def get_interaction_matrix_df(self):
  return self.interaction_matrix_df

- def get_market_areas_df(self):
- return self.market_areas_df
-
  def get_customer_origins(self):
  return self.customer_origins
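The new SupplyLocations.add_new_destinations() method concatenates a second SupplyLocations object onto the first and bumps the point count in the metadata. A minimal usage sketch under assumptions (the second shapefile name is hypothetical; also note the method relies on DataFrame.append, which was removed in pandas 2.0, so it needs an older pandas):

from huff.models import load_geodata

# Existing destinations (shipped test data) and a hypothetical second layer
supermarkets = load_geodata(
    "data/Haslach_supermarkets.shp",
    location_type="destinations",
    unique_id="LFDNR"
)
new_stores = load_geodata(
    "data/new_supermarkets.shp",  # hypothetical additional destinations
    location_type="destinations",
    unique_id="LFDNR"
)

# Rows of new_stores are appended and metadata["no_points"] is increased accordingly
supermarkets = supermarkets.add_new_destinations(new_stores)
supermarkets.summary()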
@@ -231,7 +257,7 @@ class InteractionMatrix:
  customer_origins_metadata = self.get_customer_origins().get_metadata()
  supply_locations_metadata = self.get_supply_locations().get_metadata()

- print("Huff Model")
+ print("Huff Model Interaction Matrix")
  print("----------------------------------")
  print("Supply locations " + str(supply_locations_metadata["no_points"]))
  if supply_locations_metadata["attraction_col"][0] is None:
@@ -259,6 +285,7 @@ class InteractionMatrix:
  self,
  range_type: str = "time",
  time_unit: str = "minutes",
+ ors_server: str = "https://api.openrouteservice.org/v2/",
  ors_auth: str = None,
  save_output: bool = False,
  output_filepath: str = "transport_costs_matrix.csv"
@@ -285,8 +312,11 @@ class InteractionMatrix:
  customer_origins_index = list(range(len(customer_origins_coords)))
  locations_coords_index = list(range(len(customer_origins_index), len(locations_coords)))

- time_distance_matrix = matrix(
- auth = ors_auth,
+ ors_client = Client(
+ server = ors_server,
+ auth = ors_auth
+ )
+ time_distance_matrix = ors_client.matrix(
  locations = locations_coords,
  save_output = save_output,
  output_filepath = output_filepath,
@@ -295,6 +325,9 @@ class InteractionMatrix:
  range_type = range_type
  )

+ if time_distance_matrix.get_metadata() is None:
+ raise ValueError ("No transport costs matrix was built.")
+
  transport_costs_matrix = time_distance_matrix.get_matrix()
  transport_costs_matrix_config = time_distance_matrix.get_config()
  range_type = transport_costs_matrix_config["range_type"]
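Version 1.1.0 routes transport_costs() through the new Client wrapper, adds an ors_server parameter, and fails fast when no matrix comes back. A hedged sketch of a call against a self-hosted ORS instance (the localhost URL is an assumption; by default the public OpenRouteService API is used):

# interaction_matrix as returned by create_interaction_matrix(...)
interaction_matrix = interaction_matrix.transport_costs(
    range_type="time",
    ors_server="http://localhost:8080/ors/v2/",  # assumed self-hosted ORS endpoint
    ors_auth=None,                               # no API key needed locally
    save_output=False
)
# Raises ValueError("No transport costs matrix was built.") if the ORS request failed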
@@ -418,12 +451,98 @@ class InteractionMatrix:
  market_areas_df = market_areas_df.reset_index(drop=False)
  market_areas_df = market_areas_df.rename(columns={"E_ij": "T_j"})

- print(market_areas_df)
+ huff_model = HuffModel(
+ self,
+ market_areas_df
+ )
+
+ return huff_model

- self.market_areas_df = market_areas_df
+ def mci_transformation(
+ self,
+ cols = ["A_j", "t_ij"]
+ ):
+
+ cols = cols + ["p_ij"]
+
+ interaction_matrix_df = self.interaction_matrix_df
+
+ interaction_matrix_df = mci_transformation(
+ df = interaction_matrix_df,
+ ref_col = "i",
+ cols = cols
+ )
+
+ self.interaction_matrix_df = interaction_matrix_df

  return self

+ class HuffModel:
+
+ def __init__(
+ self,
+ interaction_matrix,
+ market_areas_df
+ ):
+
+ self.interaction_matrix = interaction_matrix
+ self.market_areas_df = market_areas_df
+
+ def get_interaction_matrix_df(self):
+
+ interaction_matrix = self.interaction_matrix
+ interaction_matrix_df = interaction_matrix.get_interaction_matrix_df()
+
+ return interaction_matrix_df
+
+ def get_supply_locations(self):
+
+ interaction_matrix = self.interaction_matrix
+ supply_locations = interaction_matrix.get_supply_locations()
+
+ return supply_locations
+
+ def get_customer_origins(self):
+
+ interaction_matrix = self.interaction_matrix
+ customer_origins = interaction_matrix.get_customer_origins()
+
+ return customer_origins
+
+ def get_market_areas_df(self):
+ return self.market_areas_df
+
+ def summary(self):
+
+ interaction_matrix = self.interaction_matrix
+
+ customer_origins_metadata = interaction_matrix.get_customer_origins().get_metadata()
+ supply_locations_metadata = interaction_matrix.get_supply_locations().get_metadata()
+
+ print("Huff Model")
+ print("----------------------------------")
+ print("Supply locations " + str(supply_locations_metadata["no_points"]))
+ if supply_locations_metadata["attraction_col"][0] is None:
+ print("Attraction column not defined")
+ else:
+ print("Attraction column " + supply_locations_metadata["attraction_col"][0])
+ print("Customer origins " + str(customer_origins_metadata["no_points"]))
+ if customer_origins_metadata["marketsize_col"] is None:
+ print("Market size column not defined")
+ else:
+ print("Market size column " + customer_origins_metadata["marketsize_col"])
+ print("----------------------------------")
+ print("Weights")
+ if supply_locations_metadata["weighting"][0]["func"] is None and supply_locations_metadata["weighting"][0]["param"] is None:
+ print("Gamma not defined")
+ else:
+ print("Gamma " + str(supply_locations_metadata["weighting"][0]["param"]) + " (" + supply_locations_metadata["weighting"][0]["func"] + ")")
+ if customer_origins_metadata["weighting"][0]["func"] is None and customer_origins_metadata["weighting"][0]["param"] is None:
+ print("Lambda not defined")
+ else:
+ print("Lambda " + str(customer_origins_metadata["weighting"][0]["param"]) + " (" + customer_origins_metadata["weighting"][0]["func"] + ")")
+ print("----------------------------------")
+
  def load_geodata (
  file,
  location_type: str,
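marketareas() no longer just prints the totals and stashes them on the InteractionMatrix; it now returns a HuffModel that wraps the interaction matrix together with the aggregated totals per destination (column T_j). A short sketch of the revised workflow (variable names are illustrative):

huff_model = interaction_matrix.flows().marketareas()

# HuffModel delegates to the wrapped InteractionMatrix ...
print(huff_model.get_interaction_matrix_df().head())

# ... and carries the aggregated totals per destination j (column T_j)
print(huff_model.get_market_areas_df())

# Combined summary of origins, destinations and weighting parameters
huff_model.summary()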
@@ -438,7 +557,7 @@ def load_geodata (
  ):

  if location_type is None or (location_type != "origins" and location_type != "destinations"):
- raise ValueError ("location_type must be either 'loc' or 'dest'")
+ raise ValueError ("location_type must be either 'origins' or 'destinations'")

  if data_type not in ["shp", "csv", "xlsx"]:
  raise ValueError ("data_type must be 'shp', 'csv' or 'xlsx'")
@@ -517,7 +636,7 @@ def create_interaction_matrix(
  if not isinstance(customer_origins, CustomerOrigins):
  raise ValueError ("customer_origins must be of class CustomerOrigins")
  if not isinstance(supply_locations, SupplyLocations):
- raise ValueError ("supply_locations must be of class supply_locations")
+ raise ValueError ("supply_locations must be of class SupplyLocations")

  customer_origins_metadata = customer_origins.get_metadata()
  if customer_origins_metadata["marketsize_col"] is None:
@@ -573,13 +692,31 @@ def create_interaction_matrix(
  interaction_matrix_df["p_ij"] = None
  interaction_matrix_df["E_ij"] = None

- market_areas_df = None
-
  interaction_matrix = InteractionMatrix(
  interaction_matrix_df,
- market_areas_df,
  customer_origins,
  supply_locations
  )

- return interaction_matrix
+ return interaction_matrix
+
+ def mci_transformation(
+ df: pd.DataFrame,
+ ref_col: str,
+ cols: list
+ ):
+
+ def lct (x):
+
+ x_geom = np.exp(np.log(x).mean())
+ x_lct = np.log(x/x_geom)
+
+ return x_lct
+
+ for var in cols:
+
+ var_t = df.groupby(ref_col)[var].apply(lct)
+ var_t = var_t.reset_index()
+ df[var+"_t"] = var_t[var]
+
+ return df
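The new module-level mci_transformation() applies the log-centering transformation used to linearize the Multiplicative Competitive Interaction (MCI) model: within each origin group i, every value is divided by the group's geometric mean and then logged. A self-contained toy example of what lct() computes (made-up numbers; this sketch uses groupby().transform(), which is equivalent here to the package's apply()/reset_index() shuffle):

import numpy as np
import pandas as pd

df = pd.DataFrame({
    "i":    ["O1", "O1", "O2", "O2"],          # origin identifiers
    "A_j":  [1000.0, 2000.0, 1500.0, 500.0],   # attraction
    "t_ij": [5.0, 10.0, 8.0, 4.0],             # travel time
})

def lct(x):
    # log-centering: log of each value relative to its group's geometric mean
    x_geom = np.exp(np.log(x).mean())
    return np.log(x / x_geom)

for var in ["A_j", "t_ij"]:
    df[var + "_t"] = df.groupby("i")[var].transform(lct)

print(df)
# For origin O1 the geometric mean of A_j is sqrt(1000 * 2000) ~ 1414.2,
# so A_j_t ~ log(1000 / 1414.2) ~ -0.347 and log(2000 / 1414.2) ~ +0.347.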
huff/ors.py CHANGED
@@ -4,8 +4,8 @@
  # Author: Thomas Wieland
  # ORCID: 0000-0001-5168-9846
  # mail: geowieland@googlemail.com
- # Version: 1.0.0
- # Last update: 2025-04-25 18:14
+ # Version: 1.1.0
+ # Last update: 2025-04-26 19:35
  # Copyright (c) 2025 Thomas Wieland
  #-----------------------------------------------------------------------

@@ -37,17 +37,21 @@ class Isochrone:
  metadata = self.metadata
  status_code = self.status_code

- range_str = [str(range) for range in metadata["query"]["range"]]
- profile = metadata["query"]["profile"]
- range_type = metadata["query"]["range_type"]
- no_locations = len(metadata["query"]["locations"]) #[str(seg) for seg in metadata["query"]["locations"]]
-
- print("Locations: " + str(no_locations))
- print("Segments: " + ", ".join(range_str))
- print("Range type: " + range_type)
- print("Profile: " + profile)
- print("Status code: " + str(status_code))
+ if metadata is not None:
+ range_str = [str(range) for range in metadata["query"]["range"]]
+ profile = metadata["query"]["profile"]
+ range_type = metadata["query"]["range_type"]
+ no_locations = len(metadata["query"]["locations"])

+ print("Locations " + str(no_locations))
+ print("Segments " + ", ".join(range_str))
+ print("Range type " + range_type)
+ print("Profile " + profile)
+
+ else:
+ print("No isochrones were built.")
+
+ print("Status code " + str(status_code))

  class TimeDistanceMatrix:

@@ -81,254 +85,298 @@ class TimeDistanceMatrix:
  metadata = self.metadata
  status_code = self.status_code

- pass # TODO ??
-
-
- def isochrone(
- auth: str,
- locations: list,
- id: list = [],
- segments: list = [900, 600, 300],
- range_type: str = "time",
- intersections: str = "true",
- profile: str = "driving-car",
- save_output: bool = True,
- output_filepath: str = "isochrones.shp",
- output_crs: str = "EPSG:2056",
- verbose: bool = True
- ):
-
- ors_url = "https://api.openrouteservice.org/v2/isochrones/" + profile
-
- headers = {
- "Content-Type": "application/json; charset=utf-8",
- "Accept": "application/json, application/geo+json, application/gpx+xml, img/png; charset=utf-8",
- "Authorization": auth
- }
-
- body = {
- "locations": locations,
- "range": segments,
- "intersections": intersections,
- "range_type": range_type
- }
-
- try:
-
- response = requests.post(
- ors_url,
- headers=headers,
- json=body
- )
-
- except:
+ config = self.save_config

- if verbose is True:
- print ("Unknown error while accessing ORS server")
-
- status_code = 99999
- isochrones_gdf = None
- metadata = None
+ if metadata is not None:

- status_code = response.status_code
+ profile = metadata["query"]["profile"]
+ no_locations = len(metadata["query"]["locations"])
+ range_type = config["range_type"]

- if status_code == 200:
+ print("Locations " + str(no_locations))
+ print("Range type " + range_type)
+ print("Profile " + profile)
+ else:
+ print("No time/distance matrix was built.")
+ print("Status code " + str(status_code))

- if verbose is True:
- print ("Accessing ORS server successful")
+ class Client:

- response_json = response.json()
-
- metadata = response_json["metadata"]
+ def __init__(
+ self,
+ server = "https://api.openrouteservice.org/v2/",
+ auth: str = None
+ ):

- features = response_json["features"]
- geometries = [shape(feature["geometry"]) for feature in features]
+ self.server = server
+ self.auth = auth
+
+ def isochrone(
+ self,
+ locations: list,
+ id: list = [],
+ segments: list = [900, 600, 300],
+ range_type: str = "time",
+ intersections: str = "true",
+ profile: str = "driving-car",
+ save_output: bool = True,
+ output_filepath: str = "isochrones.shp",
+ output_crs: str = "EPSG:2056",
+ verbose: bool = True
+ ):

- isochrones_gdf = gp.GeoDataFrame(
- features,
- geometry=geometries,
- crs="EPSG:4326"
- )
+ ors_url = self.server + "isochrones/" + profile
+ auth = self.auth

- isochrones_gdf["segment"] = 0
- isochrones_gdf_properties_dict = dict(isochrones_gdf["properties"])
+ headers = {
+ "Content-Type": "application/json; charset=utf-8",
+ "Accept": "application/json, application/geo+json, application/gpx+xml, img/png; charset=utf-8",
+ "Authorization": auth
+ }

- for i in range(len(isochrones_gdf_properties_dict)):
- isochrones_gdf.iloc[i,3] = isochrones_gdf_properties_dict[i]["value"]
+ body = {
+ "locations": locations,
+ "range": segments,
+ "intersections": intersections,
+ "range_type": range_type
+ }

- isochrones_gdf = isochrones_gdf.drop(columns=["properties"])
- isochrones_gdf = isochrones_gdf.to_crs(output_crs)
+ save_config = {
+ "range_type": range_type,
+ "save_output": save_output,
+ "output_filepath" : output_filepath,
+ "output_crs": output_crs
+ }

- if save_output:
- isochrones_gdf.to_file(output_filepath)
- print ("Saved as", output_filepath)
+ try:

- else:
+ response = requests.post(
+ ors_url,
+ headers=headers,
+ json=body
+ )
+
+ except:

- if verbose is True:
- print ("Error while accessing ORS server. Status Code: " + str(status_code))
+ if verbose is True:
+ print ("Unknown error while accessing ORS server")
+
+ status_code = 99999
+ isochrones_gdf = None
+ metadata = None
+
+ isochrone_output = Isochrone(
+ isochrones_gdf,
+ metadata,
+ status_code,
+ save_config
+ )
+
+ return isochrone_output

- isochrones_gdf = None
- metadata = None
+ status_code = response.status_code

- save_config = {
- "range_type": range_type,
- "save_output": save_output,
- "output_filepath" : output_filepath,
- "output_crs": output_crs
- }
-
- isochrone_output = Isochrone(
- isochrones_gdf,
- metadata,
- status_code,
- save_config
- )
-
- return isochrone_output
-
- def matrix(
- auth,
- locations: list,
- sources: list = [],
- destinations: list = [],
- id: str = None,
- range_type = "time",
- metrics: list = [],
- resolve_locations: bool = False,
- units: str = "mi",
- save_output = False,
- output_filepath = "matrix.csv",
- csv_sep = ";",
- csv_decimal = ",",
- csv_encoding = None,
- verbose = True
- ):
-
- ors_url = "https://api.openrouteservice.org/v2/matrix/driving-car"
-
- headers = {
- "Content-Type": "application/json; charset=utf-8",
- "Accept": "application/json, application/geo+json, application/gpx+xml, img/png; charset=utf-8",
- "Authorization": auth
- }
-
- body = {
- "locations": locations,
- "resolve_locations": resolve_locations
- }
- if id is not None:
- body["id"] = id
- if metrics != []:
- body["metrics"] = metrics
- if sources != []:
- body["sources"] = sources
- if destinations != []:
- body["destinations"] = destinations
- if units is not None:
- body["units"] = units
-
- try:
-
- response = requests.post(
- ors_url,
- headers=headers,
- json=body
- )
-
- except:
-
- if verbose is True:
- print ("Unknown error while accessing ORS server")
-
- status_code = 99999
- matrix_df = None
- metadata = None
-
- status_code = response.status_code
+ if status_code == 200:

- if status_code == 200:
+ if verbose is True:
+ print ("Accessing ORS server successful")

- if verbose is True:
- print ("Accessing ORS server successful")
+ response_json = response.json()
+
+ metadata = response_json["metadata"]
+
+ features = response_json["features"]
+ geometries = [shape(feature["geometry"]) for feature in features]

- response_json = response.json()
+ isochrones_gdf = gp.GeoDataFrame(
+ features,
+ geometry=geometries,
+ crs="EPSG:4326"
+ )

- metadata = response_json["metadata"]
+ isochrones_gdf["segment"] = 0
+ isochrones_gdf_properties_dict = dict(isochrones_gdf["properties"])

- matrix_df = pd.DataFrame(
- columns=[
- "source",
- "source_lat",
- "source_lon",
- "source_snapped_distance",
- "destination",
- "destination_lat",
- "destination_lon",
- "destination_snapped_distance",
- "source_destination",
- range_type
- ])
+ for i in range(len(isochrones_gdf_properties_dict)):
+ isochrones_gdf.iloc[i,3] = isochrones_gdf_properties_dict[i]["value"]

- for i, value in enumerate(response_json["durations"]):
+ isochrones_gdf = isochrones_gdf.drop(columns=["properties"])
+ isochrones_gdf = isochrones_gdf.to_crs(output_crs)

- source_lat = response_json["sources"][i]["location"][1]
- source_lon = response_json["sources"][i]["location"][0]
- source_snapped_distance = response_json["sources"][i]["snapped_distance"]
-
- for j, entry in enumerate(value):
-
- destination_lat = response_json["destinations"][j]["location"][1]
- destination_lon = response_json["destinations"][j]["location"][0]
- destination_snapped_distance = response_json["destinations"][j]["snapped_distance"]
-
- matrix_row = pd.Series(
- {
- "source": str(i),
- "source_lat": source_lat,
- "source_lon": source_lon,
- "source_snapped_distance": source_snapped_distance,
- "destination": str(j),
- "destination_lat": destination_lat,
- "destination_lon": destination_lon,
- "destination_snapped_distance": destination_snapped_distance,
- "source_destination": str(i)+"_"+str(j),
- range_type: entry
- }
- )
-
- matrix_df = pd.concat([
- matrix_df,
- pd.DataFrame([matrix_row])])
-
- if save_output:
- matrix_df.to_csv(
- output_filepath,
- decimal = csv_decimal,
- sep = csv_sep,
- encoding = csv_encoding
- )
- print ("Saved as", output_filepath)
+ if save_output:
+ isochrones_gdf.to_file(output_filepath)
+ print ("Saved as", output_filepath)

- else:
+ else:

- if verbose is True:
- print ("Error in accessing ORS server. Status Code: " + str(status_code))
+ if verbose is True:
+ print ("Error while accessing ORS server. Status Code: " + str(status_code))

- matrix_df = None
- metadata = None
+ isochrones_gdf = None
+ metadata = None
+
+ isochrone_output = Isochrone(
+ isochrones_gdf,
+ metadata,
+ status_code,
+ save_config
+ )
+
+ return isochrone_output
+
+ def matrix(
+ self,
+ locations: list,
+ sources: list = [],
+ destinations: list = [],
+ id: str = None,
+ range_type = "time",
+ profile = "driving-car",
+ metrics: list = [],
+ resolve_locations: bool = False,
+ units: str = "mi",
+ save_output = False,
+ output_filepath = "matrix.csv",
+ csv_sep = ";",
+ csv_decimal = ",",
+ csv_encoding = None,
+ verbose = True
+ ):

- save_config = {
- "range_type": range_type,
- "save_output": save_output,
- "output_filepath": output_filepath
- }
+ ors_url = self.server + "matrix/" + profile
+ auth = self.auth

- matrix_output = TimeDistanceMatrix(
- matrix_df,
- metadata,
- status_code,
- save_config
- )
-
- return matrix_output
+ headers = {
+ "Content-Type": "application/json; charset=utf-8",
+ "Accept": "application/json, application/geo+json, application/gpx+xml, img/png; charset=utf-8",
+ "Authorization": auth
+ }
+
+ body = {
+ "locations": locations,
+ "resolve_locations": resolve_locations
+ }
+ if id is not None:
+ body["id"] = id
+ if metrics != []:
+ body["metrics"] = metrics
+ if sources != []:
+ body["sources"] = sources
+ if destinations != []:
+ body["destinations"] = destinations
+ if units is not None:
+ body["units"] = units
+
+ save_config = {
+ "range_type": range_type,
+ "save_output": save_output,
+ "output_filepath": output_filepath
+ }
+
+ try:
+
+ response = requests.post(
+ ors_url,
+ headers=headers,
+ json=body
+ )
+
+ except:
+
+ if verbose is True:
+ print ("Unknown error while accessing ORS server")
+
+ status_code = 99999
+ matrix_df = None
+ metadata = None
+
+ matrix_output = TimeDistanceMatrix(
+ matrix_df,
+ metadata,
+ status_code,
+ save_config
+ )
+
+ return matrix_output
+
+ status_code = response.status_code
+
+ if status_code == 200:
+
+ if verbose is True:
+ print ("Accessing ORS server successful")
+
+ response_json = response.json()
+
+ metadata = response_json["metadata"]
+
+ matrix_df = pd.DataFrame(
+ columns=[
+ "source",
+ "source_lat",
+ "source_lon",
+ "source_snapped_distance",
+ "destination",
+ "destination_lat",
+ "destination_lon",
+ "destination_snapped_distance",
+ "source_destination",
+ range_type
+ ])
+
+ for i, value in enumerate(response_json["durations"]):
+
+ source_lat = response_json["sources"][i]["location"][1]
+ source_lon = response_json["sources"][i]["location"][0]
+ source_snapped_distance = response_json["sources"][i]["snapped_distance"]
+
+ for j, entry in enumerate(value):
+
+ destination_lat = response_json["destinations"][j]["location"][1]
+ destination_lon = response_json["destinations"][j]["location"][0]
+ destination_snapped_distance = response_json["destinations"][j]["snapped_distance"]
+
+ matrix_row = pd.Series(
+ {
+ "source": str(i),
+ "source_lat": source_lat,
+ "source_lon": source_lon,
+ "source_snapped_distance": source_snapped_distance,
+ "destination": str(j),
+ "destination_lat": destination_lat,
+ "destination_lon": destination_lon,
+ "destination_snapped_distance": destination_snapped_distance,
+ "source_destination": str(i)+"_"+str(j),
+ range_type: entry
+ }
+ )
+
+ matrix_df = pd.concat([
+ matrix_df,
+ pd.DataFrame([matrix_row])])
+
+ if save_output:
+ matrix_df.to_csv(
+ output_filepath,
+ decimal = csv_decimal,
+ sep = csv_sep,
+ encoding = csv_encoding
+ )
+ print ("Saved as", output_filepath)
+
+ else:
+
+ if verbose is True:
+ print ("Error in accessing ORS server. Status Code: " + str(status_code))
+
+ matrix_df = None
+ metadata = None
+
+ matrix_output = TimeDistanceMatrix(
+ matrix_df,
+ metadata,
+ status_code,
+ save_config
+ )
+
+ return matrix_output
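The module-level isochrone() and matrix() functions are replaced by a Client class that holds the server URL and API key, which is what the updated test script below exercises. A minimal sketch (the API key is a placeholder, not a real credential):

from huff.ors import Client

ors_client = Client(
    server="https://api.openrouteservice.org/v2/",  # default public ORS endpoint
    auth="YOUR_ORS_API_KEY"                         # placeholder
)

isochrones = ors_client.isochrone(
    locations=[[7.5933, 47.5433]],
    segments=[900, 600, 300],   # seconds, since range_type defaults to "time"
    save_output=False
)
isochrones.summary()

tc_matrix = ors_client.matrix(
    locations=[[9.70093, 48.477473], [9.207916, 49.153868]],
    save_output=False
)
tc_matrix.summary()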
huff/tests/tests_huff.py CHANGED
@@ -4,25 +4,28 @@
  # Author: Thomas Wieland
  # ORCID: 0000-0001-5168-9846
  # mail: geowieland@googlemail.com
- # Version: 1.0.0
- # Last update: 2025-04-25 18:08
+ # Version: 1.1.0
+ # Last update: 2025-04-28 19:36
  # Copyright (c) 2025 Thomas Wieland
  #-----------------------------------------------------------------------


- from ..ors import isochrone, matrix
- from ..models import load_geodata, create_interaction_matrix
+ from huff.ors import Client
+ from huff.models import load_geodata, create_interaction_matrix


  # Isochrones test:

  output_path = "."

- isochrone_ORS = isochrone (
- auth = "5b3ce3597851110001cf62480a15aafdb5a64f4d91805929f8af6abd",
- locations =[[7.593301534652711, 47.54329763735186], [9.207916,49.153868]],
+ ors_client = Client(
+ auth = "5b3ce3597851110001cf62480a15aafdb5a64f4d91805929f8af6abd"
+ )
+
+ isochrone_ORS = ors_client.isochrone (
+ locations = [[7.593301534652711, 47.54329763735186], [9.207916,49.153868]],
  save_output = True,
- output_filepath = "test.shp",
+ output_filepath = "test_isochrones.shp",
  intersections="false"
  )

@@ -30,19 +33,19 @@ isochrone_ORS.summary()

  # Matrix test:

- matrix_ORS = matrix(auth="5b3ce3597851110001cf62480a15aafdb5a64f4d91805929f8af6abd",
+ matrix_ORS = ors_client.matrix(
  locations=[[9.70093,48.477473],[9.207916,49.153868],[37.573242,55.801281],[115.663757,38.106467]],
  save_output=True,
- output_filepath="testmatrix.csv"
+ output_filepath="test_matrix.csv"
  )

- print(matrix_ORS)
+ matrix_ORS.summary()


  # Huff model test data:

  Haslach = load_geodata(
- "huff/tests/data/Haslach.shp",
+ "data/Haslach.shp",
  location_type="origins",
  unique_id="BEZEICHN"
  )
@@ -51,13 +54,15 @@ Haslach.summary()

  Haslach.define_marketsize("pop")

- Haslach.define_transportcosts_weighting()
+ Haslach.define_transportcosts_weighting(
+ param_lambda=-2.2
+ )

  Haslach.summary()


  Haslach_supermarkets = load_geodata(
- "huff/tests/data/Haslach_supermarkets.shp",
+ "data/Haslach_supermarkets.shp",
  location_type="destinations",
  unique_id="LFDNR"
  )
@@ -66,7 +71,9 @@ Haslach_supermarkets.summary()

  Haslach_supermarkets.define_attraction("VKF_qm")

- Haslach_supermarkets.define_attraction_weighting()
+ Haslach_supermarkets.define_attraction_weighting(
+ param_gamma=0.9
+ )

  Haslach_supermarkets.summary()

@@ -82,8 +89,8 @@ interaction_matrix = haslach_interactionmatrix.transport_costs(

  interaction_matrix = interaction_matrix.flows()

- interaction_matrix.get_interaction_matrix_df().to_excel("interaction_matrix.xlsx")
+ huff_model = interaction_matrix.marketareas()

- interaction_matrix.marketareas()
+ print(interaction_matrix.get_interaction_matrix_df())

- interaction_matrix.summary()
+ print(huff_model.get_market_areas_df())
huff-1.0.0.dist-info/METADATA → huff-1.1.0.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
- Metadata-Version: 2.2
+ Metadata-Version: 2.1
  Name: huff
- Version: 1.0.0
+ Version: 1.1.0
  Summary: huff: Huff Model Market Area Analysis
  Author: Thomas Wieland
  Author-email: geowieland@googlemail.com
@@ -8,12 +8,6 @@ Description-Content-Type: text/markdown
  Requires-Dist: geopandas
  Requires-Dist: pandas
  Requires-Dist: numpy
- Dynamic: author
- Dynamic: author-email
- Dynamic: description
- Dynamic: description-content-type
- Dynamic: requires-dist
- Dynamic: summary

  # huff: Huff Model Market Area Analysis

huff-1.0.0.dist-info/RECORD → huff-1.1.0.dist-info/RECORD CHANGED
@@ -1,9 +1,9 @@
  huff/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- huff/gistools.py,sha256=pyNYiRG_PwjiVlOLY26zPDIGVHE79uwyz48kGQ4k9us,1625
- huff/models.py,sha256=UxFo2laIrNahWrcf-_4xVMO9eR_w7kIBeoG_w_eGD70,21476
- huff/ors.py,sha256=TF1US2Tc1L4ny6-S4JFIkDzgxxrtFsD_N9A-FQBabLM,9397
+ huff/gistools.py,sha256=S-C2wjHWtdkpUtxMEKznJp0fCi4GwB8IBRYtuiqnmGg,1625
+ huff/models.py,sha256=tjriIk9EaAWbgrll8Z1bFjFCwD9UgsBuoVfqUMUbGME,26275
+ huff/ors.py,sha256=XtRn6gtR1P1VmIBsflzDDu6_GLWwCg-Gdl_cogQnX-o,11515
  huff/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- huff/tests/tests_huff.py,sha256=6L16suvmcYtMBmgppHnhqJSCr6OI1YJh1Bh114ElfUc,2294
+ huff/tests/tests_huff.py,sha256=rfZ-Iv6cFl9rzlbQxS7sSVf2BgornW-764cEpCoQgoE,2321
  huff/tests/data/Haslach.cpg,sha256=OtMDH1UDpEBK-CUmLugjLMBNTqZoPULF3QovKiesmCQ,5
  huff/tests/data/Haslach.dbf,sha256=GVPIt05OzDO7UrRDcsMhiYWvyXAPg6Z-qkiysFzj-fc,506
  huff/tests/data/Haslach.prj,sha256=2Jy1Vlzh7UxQ1MXpZ9UYLs2SxfrObj2xkEkZyLqmGTY,437
@@ -16,7 +16,7 @@ huff/tests/data/Haslach_supermarkets.prj,sha256=2Jy1Vlzh7UxQ1MXpZ9UYLs2SxfrObj2x
  huff/tests/data/Haslach_supermarkets.qmd,sha256=j9i4_Pz7ZMSG2UDSb3nuhJpw0KWXIRhiiDymqJP6_Fo,2479
  huff/tests/data/Haslach_supermarkets.shp,sha256=X7QbQ0BTMag_B-bDRbpr-go2BQIXo3Y8zMAKpYZmlps,324
  huff/tests/data/Haslach_supermarkets.shx,sha256=j23QHX-SmdAeN04rw0x8nUOran-OCg_T6r_LvzzEPWs,164
- huff-1.0.0.dist-info/METADATA,sha256=Xl1URHFyO5OqALQcLsBtENr-TNO5cm0b7xo8479hOyg,749
- huff-1.0.0.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
- huff-1.0.0.dist-info/top_level.txt,sha256=nlzX-PxZNFmIxANIJMySuIFPihd6qOBkRlhIC28NEsQ,5
- huff-1.0.0.dist-info/RECORD,,
+ huff-1.1.0.dist-info/METADATA,sha256=wKaC8cqy3oc20SAhIz5fYw0IdXOJCEP_T8KkiqQPS34,610
+ huff-1.1.0.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
+ huff-1.1.0.dist-info/top_level.txt,sha256=nlzX-PxZNFmIxANIJMySuIFPihd6qOBkRlhIC28NEsQ,5
+ huff-1.1.0.dist-info/RECORD,,
huff-1.0.0.dist-info/WHEEL → huff-1.1.0.dist-info/WHEEL CHANGED
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (75.8.0)
+ Generator: bdist_wheel (0.45.1)
  Root-Is-Purelib: true
  Tag: py3-none-any
