huff 1.1.2__py3-none-any.whl → 1.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
huff/ors.py CHANGED
@@ -4,8 +4,8 @@
4
4
  # Author: Thomas Wieland
5
5
  # ORCID: 0000-0001-5168-9846
6
6
  # mail: geowieland@googlemail.com
7
- # Version: 1.1.2
8
- # Last update: 2025-05-03 13:33
7
+ # Version: 1.2.1
8
+ # Last update: 2025-05-22 05:46
9
9
  # Copyright (c) 2025 Thomas Wieland
10
10
  #-----------------------------------------------------------------------
11
11
 
@@ -162,9 +162,9 @@ class Client:
162
162
  timeout=timeout
163
163
  )
164
164
 
165
- except:
165
+ except Exception as e:
166
166
 
167
- print ("Unknown error while accessing ORS server")
167
+ print ("Error while accessing ORS server: ", str(e))
168
168
 
169
169
  status_code = 99999
170
170
  isochrones_gdf = None
@@ -212,9 +212,9 @@ class Client:
212
212
  print ("Saved as", output_filepath)
213
213
 
214
214
  else:
215
-
216
- print ("Error while accessing ORS server. Status Code: " + str(status_code))
217
-
215
+
216
+ print(f"Error while accessing ORS server. Status code: {status_code} - {response.reason}")
217
+
218
218
  isochrones_gdf = None
219
219
  metadata = None
220
220
 
@@ -233,14 +233,14 @@ class Client:
233
233
  sources: list = [],
234
234
  destinations: list = [],
235
235
  id: str = None,
236
- range_type = "time",
237
- profile = "driving-car",
236
+ range_type: str = "time",
237
+ profile: str = "driving-car",
238
238
  metrics: list = [],
239
239
  resolve_locations: bool = False,
240
240
  units: str = "mi",
241
- timeout = 10,
242
- save_output = False,
243
- output_filepath = "matrix.csv",
241
+ timeout: int = 10,
242
+ save_output: bool = False,
243
+ output_filepath: str = "matrix.csv",
244
244
  csv_sep = ";",
245
245
  csv_decimal = ",",
246
246
  csv_encoding = None
@@ -285,9 +285,9 @@ class Client:
285
285
  timeout=timeout
286
286
  )
287
287
 
288
- except:
289
-
290
- print ("Unknown error while accessing ORS server")
288
+ except Exception as e:
289
+
290
+ print ("Error while accessing ORS server: ", str(e))
291
291
 
292
292
  status_code = 99999
293
293
  matrix_df = None
@@ -368,7 +368,7 @@ class Client:
368
368
 
369
369
  else:
370
370
 
371
- print ("Error in accessing ORS server. Status Code: " + str(status_code))
371
+ print(f"Error while accessing ORS server. Status code: {status_code} - {response.reason}")
372
372
 
373
373
  matrix_df = None
374
374
  metadata = None
huff/osm.py ADDED
@@ -0,0 +1,207 @@
1
+ #-----------------------------------------------------------------------
2
+ # Name: osm (huff package)
3
+ # Purpose: Helper functions for OpenStreetMap API
4
+ # Author: Thomas Wieland
5
+ # ORCID: 0000-0001-5168-9846
6
+ # mail: geowieland@googlemail.com
7
+ # Version: 1.0.0
8
+ # Last update: 2025-05-22 05:46
9
+ # Copyright (c) 2025 Thomas Wieland
10
+ #-----------------------------------------------------------------------
11
+
12
+
13
+ import pandas as pd
14
+ import geopandas as gpd
15
+ import math
16
+ import requests
17
+ import tempfile
18
+ import time
19
+ import os
20
+ from PIL import Image
21
+ import matplotlib.pyplot as plt
22
+ import contextily as cx
23
+ from shapely.geometry import box
24
+
25
+
26
+ class Client:
27
+
28
+ def __init__(
29
+ self,
30
+ server = "http://a.tile.openstreetmap.org/",
31
+ headers = {
32
+ 'User-Agent': 'huff.osm/1.0.0 (your_name@your_email_provider.com)'
33
+ }
34
+ ):
35
+
36
+ self.server = server
37
+ self.headers = headers
38
+
39
+ def download_tile(
40
+ self,
41
+ zoom,
42
+ x,
43
+ y,
44
+ timeout = 10
45
+ ):
46
+
47
+ osm_url = self.server + f"{zoom}/{x}/{y}.png"
48
+
49
+ response = requests.get(
50
+ osm_url,
51
+ headers = self.headers,
52
+ timeout = timeout
53
+ )
54
+
55
+ if response.status_code == 200:
56
+
57
+ with tempfile.NamedTemporaryFile(delete=False, suffix='.png') as tmp_file:
58
+ tmp_file.write(response.content)
59
+ tmp_file_path = tmp_file.name
60
+ return Image.open(tmp_file_path)
61
+
62
+ else:
63
+
64
+ print(f"Error while accessing OSM server. Status code: {response.status_code} - {response.reason}")
65
+
66
+ return None
67
+
68
+
69
+ def get_basemap(
70
+ sw_lat,
71
+ sw_lon,
72
+ ne_lat,
73
+ ne_lon,
74
+ zoom = 15
75
+ ):
76
+
77
+ def lat_lon_to_tile(
78
+ lat,
79
+ lon,
80
+ zoom
81
+ ):
82
+
83
+ n = 2 ** zoom
84
+ x = int(n * ((lon + 180) / 360))
85
+ y = int(n * (1 - (math.log(math.tan(math.radians(lat)) + 1 / math.cos(math.radians(lat))) / math.pi)) / 2)
86
+ return x, y
87
+
88
+ def stitch_tiles(
89
+ zoom,
90
+ sw_lat,
91
+ sw_lon,
92
+ ne_lat,
93
+ ne_lon,
94
+ delay = 0.1
95
+ ):
96
+
97
+ osm_client = Client(
98
+ server = "http://a.tile.openstreetmap.org/"
99
+ )
100
+
101
+ sw_x_tile, sw_y_tile = lat_lon_to_tile(sw_lat, sw_lon, zoom)
102
+ ne_x_tile, ne_y_tile = lat_lon_to_tile(ne_lat, ne_lon, zoom)
103
+
104
+ tile_size = 256
105
+ width = (ne_x_tile - sw_x_tile + 1) * tile_size
106
+ height = (sw_y_tile - ne_y_tile + 1) * tile_size
107
+
108
+ stitched_image = Image.new('RGB', (width, height))
109
+
110
+ for x in range(sw_x_tile, ne_x_tile + 1):
111
+ for y in range(ne_y_tile, sw_y_tile + 1):
112
+ tile = osm_client.download_tile(
113
+ zoom = zoom,
114
+ x = x,
115
+ y = y
116
+ )
117
+ if tile:
118
+
119
+ stitched_image.paste(tile, ((x - sw_x_tile) * tile_size, (sw_y_tile - y) * tile_size))
120
+ else:
121
+ print(f"Error while retrieving tile {x}, {y}.")
122
+
123
+ time.sleep(delay)
124
+
125
+ return stitched_image
126
+
127
+ stitched_image = stitch_tiles(zoom, sw_lat, sw_lon, ne_lat, ne_lon)
128
+
129
+ if stitched_image:
130
+
131
+ stitched_image_path = "osm_map.png"
132
+ stitched_image.save(stitched_image_path)
133
+
134
+ else:
135
+ print("Error while building stitched images")
136
+
137
+
138
+ def map_with_basemap(
139
+ layers: list,
140
+ osm_basemap: bool = True,
141
+ zoom: int = 15,
142
+ styles: dict = {},
143
+ save_output: bool = True,
144
+ output_filepath: str = "osm_map_with_basemap.png",
145
+ output_dpi = 300
146
+ ):
147
+
148
+ if not layers:
149
+ raise ValueError("List layers is empty")
150
+
151
+ combined = gpd.GeoDataFrame(
152
+ pd.concat(
153
+ layers,
154
+ ignore_index = True
155
+ ),
156
+ crs = layers[0].crs
157
+ )
158
+
159
+ combined_wgs84 = combined.to_crs(epsg=4326)
160
+ bounds = combined_wgs84.total_bounds
161
+
162
+ sw_lon, sw_lat, ne_lon, ne_lat = bounds[0]*0.9999, bounds[1]*0.9999, bounds[2]*1.0001, bounds[3]*1.0001
163
+
164
+ if osm_basemap:
165
+
166
+ get_basemap(sw_lat, sw_lon, ne_lat, ne_lon, zoom=zoom)
167
+
168
+ fig, ax = plt.subplots(figsize=(10, 10))
169
+
170
+ if osm_basemap:
171
+
172
+ img = Image.open("osm_map.png")
173
+ extent_img = [sw_lon, ne_lon, sw_lat, ne_lat]
174
+ ax.imshow(img, extent=extent_img, origin="upper")
175
+
176
+ for layer in layers:
177
+ layer_3857 = layer.to_crs(epsg=3857)
178
+ layer_3857.plot(ax=ax, alpha=0.6)
179
+
180
+ bbox = box(sw_lon, sw_lat, ne_lon, ne_lat)
181
+ extent_geom = gpd.GeoSeries([bbox], crs=4326).to_crs(epsg=3857).total_bounds
182
+
183
+ ax.set_xlim(extent_geom[0], extent_geom[2])
184
+ ax.set_ylim(extent_geom[1], extent_geom[3])
185
+
186
+ if osm_basemap:
187
+ cx.add_basemap(
188
+ ax,
189
+ source=cx.providers.OpenStreetMap.Mapnik,
190
+ zoom=zoom
191
+ )
192
+
193
+ plt.axis('off')
194
+ plt.show()
195
+
196
+ if save_output:
197
+
198
+ plt.savefig(
199
+ output_filepath,
200
+ dpi = output_dpi,
201
+ bbox_inches="tight"
202
+ )
203
+
204
+ plt.close()
205
+
206
+ if os.path.exists("osm_map.png"):
207
+ os.remove("osm_map.png")
Binary file
huff/tests/tests_huff.py CHANGED
@@ -4,99 +4,204 @@
4
4
  # Author: Thomas Wieland
5
5
  # ORCID: 0000-0001-5168-9846
6
6
  # mail: geowieland@googlemail.com
7
- # Version: 1.1.2
8
- # Last update: 2025-05-03 13:32
7
+ # Version: 1.3.0
8
+ # Last update: 2025-05-22 05:41
9
9
  # Copyright (c) 2025 Thomas Wieland
10
10
  #-----------------------------------------------------------------------
11
11
 
12
12
 
13
- from huff.ors import Client
14
- from huff.models import load_geodata, create_interaction_matrix
13
+ from huff.models import create_interaction_matrix, get_isochrones, load_geodata, load_interaction_matrix
14
+ from huff.osm import map_with_basemap
15
+ from huff.gistools import buffers, point_spatial_join
15
16
 
16
17
 
17
- # Isochrones test:
18
-
19
- output_path = "."
20
-
21
- ors_client = Client(
22
- auth = "5b3ce3597851110001cf62480a15aafdb5a64f4d91805929f8af6abd"
23
- )
24
-
25
- isochrone_ORS = ors_client.isochrone (
26
- locations = [[7.593301534652711, 47.54329763735186], [9.207916,49.153868]],
27
- save_output = True,
28
- output_filepath = "test_isochrones.shp",
29
- intersections="false"
30
- )
31
-
32
- isochrone_ORS.summary()
33
-
34
- # Matrix test:
35
-
36
- matrix_ORS = ors_client.matrix(
37
- locations=[[9.70093,48.477473],[9.207916,49.153868],[37.573242,55.801281],[115.663757,38.106467]],
38
- save_output=True,
39
- output_filepath="test_matrix.csv"
40
- )
41
-
42
- matrix_ORS.summary()
43
-
44
-
45
- # Huff model test data:
18
+ # Customer origins (statistical districts):
46
19
 
47
20
  Haslach = load_geodata(
48
21
  "data/Haslach.shp",
49
22
  location_type="origins",
50
23
  unique_id="BEZEICHN"
51
24
  )
25
+ # Loading customer origins (shapefile)
26
+
27
+ Haslach_buf = Haslach.buffers(
28
+ segments_distance=[500,1000,1500],
29
+ save_output=True,
30
+ output_filepath="Haslach_buf.shp",
31
+ output_crs="EPSG:31467"
32
+ )
33
+ # Buffers for customer origins
52
34
 
53
35
  Haslach.summary()
36
+ # Summary of customer origins
54
37
 
55
38
  Haslach.define_marketsize("pop")
39
+ # Definition of market size variable
56
40
 
57
41
  Haslach.define_transportcosts_weighting(
58
- param_lambda=-2.2
42
+ param_lambda = -2.2
59
43
  )
44
+ # Definition of transport costs weighting (lambda)
60
45
 
61
46
  Haslach.summary()
47
+ # Summary after update
62
48
 
63
49
 
50
+ # Supply locations (supermarkets):
51
+
64
52
  Haslach_supermarkets = load_geodata(
65
53
  "data/Haslach_supermarkets.shp",
66
54
  location_type="destinations",
67
55
  unique_id="LFDNR"
68
56
  )
57
+ # Loading supply locations (shapefile)
69
58
 
70
59
  Haslach_supermarkets.summary()
71
-
72
- Haslach_supermarkets.isochrones(
73
- save_output=True,
74
- ors_auth = "5b3ce3597851110001cf62480a15aafdb5a64f4d91805929f8af6abd",
75
- output_filepath="Haslach_supermarkets_iso.shp"
76
- )
60
+ # Summary of supply locations
77
61
 
78
62
  Haslach_supermarkets.define_attraction("VKF_qm")
63
+ # Defining attraction variable
79
64
 
80
65
  Haslach_supermarkets.define_attraction_weighting(
81
66
  param_gamma=0.9
82
67
  )
68
+ # Define attraction weighting (gamma)
69
+
70
+ Haslach_supermarkets.isochrones(
71
+ segments_minutes=[5, 10],
72
+ profile = "foot-walking",
73
+ save_output=True,
74
+ ors_auth="5b3ce3597851110001cf62480a15aafdb5a64f4d91805929f8af6abd",
75
+ output_filepath="Haslach_supermarkets_iso.shp",
76
+ output_crs="EPSG:31467"
77
+ )
78
+ # Obtaining isochrones for walking (5 and 10 minutes)
79
+ # ORS API documentation: https://openrouteservice.org/dev/#/api-docs/v2/
83
80
 
84
81
  Haslach_supermarkets.summary()
82
+ # Summary of updated customer origins
83
+
84
+ Haslach_supermarkets_isochrones = Haslach_supermarkets.get_isochrones_gdf()
85
+ # Extracting isochrones
86
+
87
+ print(Haslach_supermarkets_isochrones)
85
88
 
86
89
 
90
+ # Using customer origins and supply locations for building interaction matrix:
91
+
87
92
  haslach_interactionmatrix = create_interaction_matrix(
88
93
  Haslach,
89
94
  Haslach_supermarkets
90
95
  )
96
+ # Creating interaction matrix
91
97
 
92
98
  interaction_matrix = haslach_interactionmatrix.transport_costs(
93
99
  ors_auth="5b3ce3597851110001cf62480a15aafdb5a64f4d91805929f8af6abd"
100
+ #network=False
94
101
  )
102
+ # Obtaining transport costs (default: driving-car)
103
+ # ORS API documentation: https://openrouteservice.org/dev/#/api-docs/v2/
95
104
 
96
105
  interaction_matrix = interaction_matrix.flows()
106
+ # Calculating spatial flows
97
107
 
98
108
  huff_model = interaction_matrix.marketareas()
109
+ # Calculating total market areas
110
+
111
+ huff_model.summary()
112
+ # Summary of Huff model
113
+
114
+ print(huff_model.get_market_areas_df())
115
+ # Showing total market areas
99
116
 
100
117
  print(interaction_matrix.get_interaction_matrix_df())
118
+ # Showing df of interaction matrix
119
+
120
+
121
+ # Multiplicative Competitive Interaction Model:
122
+
123
+ mci_fit = huff_model.mci_fit()
124
+ # Fitting via MCI
125
+
126
+ mci_fit.summary()
127
+ # Summary of MCI model
128
+
129
+ mci_fit.marketareas()
130
+ # MCI model market simulation
131
+
132
+ mci_fit.get_market_areas_df()
133
+ # MCI model market areas
134
+
135
+
136
+ # Loading own interaction matrix:
137
+ # Data source: Wieland 2015 (https://nbn-resolving.org/urn:nbn:de:bvb:20-opus-180753)
138
+
139
+ Wieland2015_interaction_matrix = load_interaction_matrix(
140
+ data="data/Wieland2015.xlsx",
141
+ customer_origins_col="Quellort",
142
+ supply_locations_col="Zielort",
143
+ attraction_col=[
144
+ "VF",
145
+ "K",
146
+ "K_KKr"
147
+ ],
148
+ transport_costs_col="Dist_Min2",
149
+ probabilities_col="MA",
150
+ data_type="xlsx"
151
+ )
152
+
153
+ Wieland2015_interaction_matrix.summary()
154
+ # Summary of interaction matrix
101
155
 
102
- print(huff_model.get_market_areas_df())
156
+ Wieland2015_fit = Wieland2015_interaction_matrix.mci_fit(
157
+ cols=[
158
+ "A_j",
159
+ "t_ij",
160
+ "K",
161
+ "K_KKr"
162
+ ]
163
+ )
164
+ # Fitting MCI model with four independent variables
165
+
166
+ Wieland2015_fit.summary()
167
+ # MCI model summary
168
+
169
+
170
+ # Buffer analysis:
171
+
172
+ Haslach_supermarkets_gdf = Haslach_supermarkets.get_geodata_gpd_original()
173
+ Haslach_buffers = Haslach_buf.get_buffers()
174
+ # Extracting points and buffer polygons
175
+
176
+ spj_test = point_spatial_join(
177
+ polygon_gdf = Haslach_buffers,
178
+ point_gdf = Haslach_supermarkets_gdf,
179
+ polygon_ref_cols = ["BEZEICHN", "segment"],
180
+ point_stat_col = "VKF_qm"
181
+ )
182
+ # Spatial join with buffers and points
183
+ # Statistics for supermarkets by statistical districts
184
+
185
+ spj_test[0].to_file("spj_test.shp")
186
+ # Save joined points as shapefile
187
+
188
+ print(spj_test[1])
189
+ # Showing df with overlay statistics
190
+
191
+
192
+ # Creating map:
193
+
194
+ Haslach_gdf = Haslach.get_geodata_gpd_original()
195
+ Haslach_supermarkets_gdf = Haslach_supermarkets.get_geodata_gpd_original()
196
+ Haslach_supermarkets_gdf_iso = Haslach_supermarkets.get_isochrones_gdf()
197
+ # Extracting geopandas.GeoDataFrames
198
+
199
+ map_with_basemap(
200
+ layers = [
201
+ Haslach_supermarkets_gdf_iso,
202
+ Haslach_gdf,
203
+ Haslach_supermarkets_gdf
204
+ ],
205
+ output_filepath = "Haslach_map.png"
206
+ )
207
+ # Map with three layers and OSM basemap
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: huff
3
- Version: 1.1.2
3
+ Version: 1.3.0
4
4
  Summary: huff: Huff Model Market Area Analysis
5
5
  Author: Thomas Wieland
6
6
  Author-email: geowieland@googlemail.com
@@ -8,6 +8,13 @@ Description-Content-Type: text/markdown
8
8
  Requires-Dist: geopandas
9
9
  Requires-Dist: pandas
10
10
  Requires-Dist: numpy
11
+ Requires-Dist: statsmodels
12
+ Requires-Dist: shapely
13
+ Requires-Dist: requests
14
+ Requires-Dist: matplotlib
15
+ Requires-Dist: pillow
16
+ Requires-Dist: contextily
17
+ Requires-Dist: openpyxl
11
18
 
12
19
  # huff: Huff Model Market Area Analysis
13
20
 
@@ -23,26 +30,39 @@ See the /tests directory for usage examples of most of the included functions.
23
30
  - **Huff Model**:
24
31
  - Defining origins and destinations with weightings
25
32
  - Creating interaction matrix from origins and destinations
26
- - Calculating basic Huff Model
33
+ - Market simulation with basic Huff Model
27
34
  - **Multiplicative Competitive Interaction Model**:
28
- - Log-centering transformation
29
- - **OpenRouteService Client** (Tools via API):
30
- - Creating transport costs matrix from origins and destinations
31
- - Creating isochrones from destinations
35
+ - Log-centering transformation of interaction matrix
36
+ - Fitting MCI model with >= 2 independent variables
37
+ - MCI model market simulation
38
+ - **GIS tools**:
39
+ - OpenRouteService Client (1):
40
+ - Creating transport costs matrix from origins and destinations
41
+ - Creating isochrones from origins and destinations
42
+ - OpenStreetMap Client (2):
43
+ - Creating simple maps with OSM basemap
44
+ - Other GIS tools:
45
+ - Creating buffers from geodata
46
+ - Spatial join with statistics
47
+ - Creating Euclidean distance matrix from origins and destinations
48
+ - Overlay-difference analysis of polygons
49
+ - **Data management tools**:
50
+ - Loading own interaction matrix for analysis
51
+ - Creating origins/destinations objects from point geodata
32
52
 
33
- Attribution of OpenRouteService:
34
- © openrouteservice.org by HeiGIT | Map data © OpenStreetMap contributors
35
-
36
- Visit https://openrouteservice.org/
53
+ (1) © openrouteservice.org by HeiGIT | Map data © OpenStreetMap contributors | https://openrouteservice.org/
54
+ (2) © OpenStreetMap contributors | available under the Open Database License | https://www.openstreetmap.org/
37
55
 
38
56
  ## Literature
39
57
  - Huff DL (1962) *Determination of Intra-Urban Retail Trade Areas*.
40
58
  - Huff DL (1964) Defining and estimating a trading area. *Journal of Marketing* 28(4): 34–38. [10.2307/1249154](https://doi.org/10.2307/1249154)
41
59
  - Huff DL, McCallum BM (2008) Calibrating the Huff Model using ArcGIS Business Analyst. ESRI White Paper, September 2008. https://www.esri.com/library/whitepapers/pdfs/calibrating-huff-model.pdf.
42
- - De Beule M, Van den Poel D, Van de Weghe N (2014) An extended Huff-model for robustly benchmarking and predicting retail network performance. *Applied Geography*,* 46(1): 80–89. [10.1016/j.apgeog.2013.09.026](https://doi.org/10.1016/j.apgeog.2013.09.026)
60
+ - De Beule M, Van den Poel D, Van de Weghe N (2014) An extended Huff-model for robustly benchmarking and predicting retail network performance. *Applied Geography* 46(1): 80–89. [10.1016/j.apgeog.2013.09.026](https://doi.org/10.1016/j.apgeog.2013.09.026)
43
61
  - Nakanishi M, Cooper LG (1974) Parameter estimation for a Multiplicative Competitive Interaction Model: Least squares approach. *Journal of Marketing Research* 11(3): 303–311. [10.2307/3151146](https://doi.org/10.2307/3151146).
62
+ - Nakanishi M, Cooper LG (1982) Technical Note — Simplified Estimation Procedures for MCI Models. *Marketing Science* 1(3): 314-322. [10.1287/mksc.1.3.314](https://doi.org/10.1287/mksc.1.3.314)
44
63
  - Wieland T (2017) Market Area Analysis for Retail and Service Locations with MCI. *R Journal* 9(1): 298-323. [10.32614/RJ-2017-020](https://doi.org/10.32614/RJ-2017-020)
45
64
  - Wieland T (2018) A Hurdle Model Approach of Store Choice and Market Area Analysis in Grocery Retailing. *Papers in Applied Geography* 4(4): 370-389. [10.1080/23754931.2018.1519458](https://doi.org/10.1080/23754931.2018.1519458)
65
+ - Wieland T (2023) Spatial shopping behavior during the Corona pandemic: insights from a micro-econometric store choice model for consumer electronics and furniture retailing in Germany. *Journal of Geographical Systems* 25(2): 291–326. [10.1007/s10109-023-00408-x](https://doi.org/10.1007/s10109-023-00408-x)
46
66
 
47
67
 
48
68
  ## Installation
@@ -1,9 +1,10 @@
1
1
  huff/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
2
- huff/gistools.py,sha256=vuEpNC-IEIrNtzptdjzyvOP05qFbJYfeHpPZfo_OMvs,2721
3
- huff/models.py,sha256=yKty9d8nG05HSjNIAPiGNaDeEHLGySZhpNZJHnmPYdU,31300
4
- huff/ors.py,sha256=dkuVj14Jr69D2xp8NSi2QDkXNNLrudUs9f-i_UtKOdQ,11467
2
+ huff/gistools.py,sha256=yEinx-rq9o8yXW6CgVIVsb1Rwi0WKnvCDRGD52h8jxo,6857
3
+ huff/models.py,sha256=hqR3jcFBD2h3SPX_U7YApQrql7yBUw2frmLF4uhVTfI,59645
4
+ huff/ors.py,sha256=D38PN2Az9kUbAqsFpb4W5Z5pSkbKSOH1W5IBlhos7ZA,11570
5
+ huff/osm.py,sha256=mj9FFxieMd5Gv8Nn72nt89dHSfzZYGQt2dYHOWfx6hY,5464
5
6
  huff/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
6
- huff/tests/tests_huff.py,sha256=4AbQcD46iG7IsFioigVwRwOyeHMniYELIspMh-rCEHk,2515
7
+ huff/tests/tests_huff.py,sha256=kWUob6X7DvznSo6Nh9yaffkbdEt7XUK1s-JOVZBLz2g,5588
7
8
  huff/tests/data/Haslach.cpg,sha256=OtMDH1UDpEBK-CUmLugjLMBNTqZoPULF3QovKiesmCQ,5
8
9
  huff/tests/data/Haslach.dbf,sha256=GVPIt05OzDO7UrRDcsMhiYWvyXAPg6Z-qkiysFzj-fc,506
9
10
  huff/tests/data/Haslach.prj,sha256=2Jy1Vlzh7UxQ1MXpZ9UYLs2SxfrObj2xkEkZyLqmGTY,437
@@ -16,7 +17,8 @@ huff/tests/data/Haslach_supermarkets.prj,sha256=2Jy1Vlzh7UxQ1MXpZ9UYLs2SxfrObj2x
16
17
  huff/tests/data/Haslach_supermarkets.qmd,sha256=j9i4_Pz7ZMSG2UDSb3nuhJpw0KWXIRhiiDymqJP6_Fo,2479
17
18
  huff/tests/data/Haslach_supermarkets.shp,sha256=X7QbQ0BTMag_B-bDRbpr-go2BQIXo3Y8zMAKpYZmlps,324
18
19
  huff/tests/data/Haslach_supermarkets.shx,sha256=j23QHX-SmdAeN04rw0x8nUOran-OCg_T6r_LvzzEPWs,164
19
- huff-1.1.2.dist-info/METADATA,sha256=xTrAKBys0WpSPbu7ojxZ-gSTs5l3zQ0uKlnmiK3gvfA,2541
20
- huff-1.1.2.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
21
- huff-1.1.2.dist-info/top_level.txt,sha256=nlzX-PxZNFmIxANIJMySuIFPihd6qOBkRlhIC28NEsQ,5
22
- huff-1.1.2.dist-info/RECORD,,
20
+ huff/tests/data/Wieland2015.xlsx,sha256=SaVM-Hi5dBTmf2bzszMnZ2Ec8NUE05S_5F2lQj0ayS0,19641
21
+ huff-1.3.0.dist-info/METADATA,sha256=4qAsnzZ3lkPANRMjBrkPhj61HoBfC7cHkambNIZjSfI,3884
22
+ huff-1.3.0.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
23
+ huff-1.3.0.dist-info/top_level.txt,sha256=nlzX-PxZNFmIxANIJMySuIFPihd6qOBkRlhIC28NEsQ,5
24
+ huff-1.3.0.dist-info/RECORD,,
File without changes