huff 1.3.5__py3-none-any.whl → 1.4.1__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
huff/ors.py CHANGED
@@ -4,8 +4,8 @@
  # Author: Thomas Wieland
  # ORCID: 0000-0001-5168-9846
  # mail: geowieland@googlemail.com
- # Version: 1.3.5
- # Last update: 2025-06-03 17:24
+ # Version: 1.4.1
+ # Last update: 2025-06-16 17:44
  # Copyright (c) 2025 Thomas Wieland
  #-----------------------------------------------------------------------

huff/osm.py CHANGED
@@ -4,8 +4,8 @@
  # Author: Thomas Wieland
  # ORCID: 0000-0001-5168-9846
  # mail: geowieland@googlemail.com
- # Version: 1.3.5
- # Last update: 2025-06-03 17:24
+ # Version: 1.4.1
+ # Last update: 2025-06-16 17:44
  # Copyright (c) 2025 Thomas Wieland
  #-----------------------------------------------------------------------

@@ -19,6 +19,7 @@ import time
  import os
  from PIL import Image
  import matplotlib.pyplot as plt
+ from matplotlib.patches import Patch
  import contextily as cx
  from shapely.geometry import box

@@ -142,108 +143,124 @@ def map_with_basemap(
      styles: dict = {},
      save_output: bool = True,
      output_filepath: str = "osm_map_with_basemap.png",
-     output_dpi = 300
-     ):
-
+     output_dpi=300,
+     legend: bool = True
+     ):
      if not layers:
          raise ValueError("List layers is empty")

      combined = gpd.GeoDataFrame(
-         pd.concat(
-             layers,
-             ignore_index = True
-             ),
-         crs = layers[0].crs
+         pd.concat(layers, ignore_index=True),
+         crs=layers[0].crs
          )

      combined_wgs84 = combined.to_crs(epsg=4326)
      bounds = combined_wgs84.total_bounds

      sw_lon, sw_lat, ne_lon, ne_lat = bounds[0]*0.9999, bounds[1]*0.9999, bounds[2]*1.0001, bounds[3]*1.0001
-
-     if osm_basemap:

+     if osm_basemap:
          get_basemap(sw_lat, sw_lon, ne_lat, ne_lon, zoom=zoom)

      fig, ax = plt.subplots(figsize=(10, 10))

      if osm_basemap:
-
          img = Image.open("osm_map.png")
          extent_img = [sw_lon, ne_lon, sw_lat, ne_lat]
          ax.imshow(img, extent=extent_img, origin="upper")

      i = 0
+     legend_handles = []
+
      for layer in layers:
-
          layer_3857 = layer.to_crs(epsg=3857)
-
+
          if styles != {}:
-
              layer_style = styles[i]
              layer_color = layer_style["color"]
              layer_alpha = layer_style["alpha"]
-
+             layer_name = layer_style["name"]
+
              if isinstance(layer_color, str):
-
                  layer_3857.plot(
                      ax=ax,
                      color=layer_color,
-                     alpha=layer_alpha
-                     )
-
+                     alpha=layer_alpha,
+                     label=layer_name
+                     )
+                 if legend:
+                     patch = Patch(
+                         facecolor=layer_color,
+                         alpha=layer_alpha,
+                         label=layer_name
+                         )
+                     legend_handles.append(patch)
+
              elif isinstance(layer_color, dict):
-
                  color_key = list(layer_color.keys())[0]
                  color_mapping = layer_color[color_key]
-
+
                  if color_key not in layer_3857.columns:
-                     raise KeyError ("Column " + color_key + " not in layer.")
-
+                     raise KeyError("Column " + color_key + " not in layer.")
+
                  for value, color in color_mapping.items():

                      subset = layer_3857[layer_3857[color_key].astype(str) == str(value)]

                      if not subset.empty:
+
                          subset.plot(
                              ax=ax,
                              color=color,
-                             alpha=layer_alpha
+                             alpha=layer_alpha,
+                             label=str(value)
                              )
-
+
+                         if legend:
+                             patch = Patch(facecolor=color, alpha=layer_alpha, label=str(value))
+                             legend_handles.append(patch)
+
          else:
-
-             layer_3857.plot(
-                 ax=ax,
-                 alpha=0.6
-                 )

-         i = i+1
+             layer_3857.plot(ax=ax, alpha=0.6, label=f"Layer {i+1}")
+
+             if legend:
+
+                 patch = Patch(
+                     facecolor="gray",
+                     alpha=0.6,
+                     label=f"Layer {i+1}"
+                     )
+
+                 legend_handles.append(patch)
+
+         i += 1

      bbox = box(sw_lon, sw_lat, ne_lon, ne_lat)
      extent_geom = gpd.GeoSeries([bbox], crs=4326).to_crs(epsg=3857).total_bounds
-
      ax.set_xlim(extent_geom[0], extent_geom[2])
      ax.set_ylim(extent_geom[1], extent_geom[3])

      if osm_basemap:
          cx.add_basemap(
-             ax,
-             source=cx.providers.OpenStreetMap.Mapnik,
+             ax,
+             source=cx.providers.OpenStreetMap.Mapnik,
              zoom=zoom
              )

      plt.axis('off')
+
+     if legend and legend_handles:
+         ax.legend(handles=legend_handles, loc='lower right', fontsize='small', frameon=True)
+
      plt.show()
-
+
      if save_output:
-
          plt.savefig(
-             output_filepath,
-             dpi = output_dpi,
+             output_filepath,
+             dpi=output_dpi,
              bbox_inches="tight"
-             )
-
+             )
      plt.close()

      if os.path.exists("osm_map.png"):
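The hunk above adds legend support to map_with_basemap() and, whenever a styles dict is passed, reads a "name" entry from every layer style. The following is a minimal usage sketch based on the updated signature and on the styles structure used in tests_huff.py further below; the GeoDataFrames and file paths are placeholders, not part of the package:

    import geopandas as gpd
    from huff.osm import map_with_basemap

    # Placeholder layers; any GeoDataFrames sharing a CRS should work.
    districts_gdf = gpd.read_file("districts.shp")
    stores_gdf = gpd.read_file("stores.shp")

    map_with_basemap(
        layers=[districts_gdf, stores_gdf],
        styles={
            0: {"name": "Districts", "color": "black", "alpha": 1},
            1: {"name": "Stores", "color": "red", "alpha": 0.8}
            },
        legend=True,  # new in 1.4.1: collects matplotlib Patch handles and draws a legend
        save_output=True,
        output_filepath="districts_stores_map.png"
        )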
huff/tests/data/Haslach_new_supermarket.cpg ADDED
@@ -0,0 +1 @@
+ UTF-8
huff/tests/data/Haslach_new_supermarket.prj ADDED
@@ -0,0 +1 @@
+ PROJCS["DHDN_3_Degree_Gauss_Zone_3",GEOGCS["GCS_Deutsches_Hauptdreiecksnetz",DATUM["D_Deutsches_Hauptdreiecksnetz",SPHEROID["Bessel_1841",6377397.155,299.1528128]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Gauss_Kruger"],PARAMETER["False_Easting",3500000.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",9.0],PARAMETER["Scale_Factor",1.0],PARAMETER["Latitude_Of_Origin",0.0],UNIT["Meter",1.0]]
huff/tests/data/Haslach_new_supermarket.qmd ADDED
@@ -0,0 +1,26 @@
+ <!DOCTYPE qgis PUBLIC 'http://mrcc.com/qgis.dtd' 'SYSTEM'>
+ <qgis version="3.28.3-Firenze">
+   <identifier></identifier>
+   <parentidentifier></parentidentifier>
+   <language></language>
+   <type></type>
+   <title></title>
+   <abstract></abstract>
+   <links/>
+   <fees></fees>
+   <encoding></encoding>
+   <crs>
+     <spatialrefsys nativeFormat="Wkt">
+       <wkt></wkt>
+       <proj4></proj4>
+       <srsid>0</srsid>
+       <srid>0</srid>
+       <authid></authid>
+       <description></description>
+       <projectionacronym></projectionacronym>
+       <ellipsoidacronym></ellipsoidacronym>
+       <geographicflag>false</geographicflag>
+     </spatialrefsys>
+   </crs>
+   <extent/>
+ </qgis>
huff/tests/tests_huff.py CHANGED
@@ -4,13 +4,13 @@
  # Author: Thomas Wieland
  # ORCID: 0000-0001-5168-9846
  # mail: geowieland@googlemail.com
- # Version: 1.3.5
- # Last update: 2025-06-03 17:24
+ # Version: 1.4.1
+ # Last update: 2025-06-16 17:43
  # Copyright (c) 2025 Thomas Wieland
  #-----------------------------------------------------------------------


- from huff.models import create_interaction_matrix, get_isochrones, load_geodata, load_interaction_matrix, modelfit
+ from huff.models import create_interaction_matrix, get_isochrones, load_geodata, load_interaction_matrix, market_shares, modelfit
  from huff.osm import map_with_basemap
  from huff.gistools import buffers, point_spatial_join

@@ -39,7 +39,11 @@ Haslach.define_marketsize("pop")
  # Definition of market size variable

  Haslach.define_transportcosts_weighting(
-     param_lambda = -2.2
+     # param_lambda = -2.2,
+     # # one weighting parameter for power function (default)
+     param_lambda = [10, -0.5],
+     func="logistic"
+     # two weighting parameters for logistic function
      )
  # Definition of transport costs weighting (lambda)

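The switch above from a single param_lambda value to a two-element list goes together with func="logistic". For orientation only, here is a sketch of the two decay families named in the comments, using common textbook parameterizations; the exact functional forms and parameter order implemented in huff.models are not visible in this diff, so treat the formulas below as illustrative assumptions:

    import numpy as np

    def power_decay(d, lam=-2.2):
        # one-parameter power decay: weight = d ** lambda, with lambda < 0
        return d ** lam

    def logistic_decay(d, lam1=10.0, lam2=-0.5):
        # two-parameter logistic decay (one common form):
        # weight = exp(lam1 + lam2 * d) / (1 + exp(lam1 + lam2 * d))
        z = lam1 + lam2 * d
        return np.exp(z) / (1.0 + np.exp(z))

    d = np.array([1.0, 5.0, 10.0, 20.0, 30.0])  # transport costs, e.g. minutes
    print(power_decay(d))     # steep drop near the origin, long flat tail
    print(logistic_decay(d))  # close to 1 for short trips, S-shaped decline afterwards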
@@ -73,7 +77,8 @@ Haslach_supermarkets.isochrones(
      save_output=True,
      ors_auth="5b3ce3597851110001cf62480a15aafdb5a64f4d91805929f8af6abd",
      output_filepath="Haslach_supermarkets_iso.shp",
-     output_crs="EPSG:31467"
+     output_crs="EPSG:31467",
+     delay=0.2
      )
  # Obtaining isochrones for walking (5 and 10 minutes)
  # ORS API documentation: https://openrouteservice.org/dev/#/api-docs/v2/
@@ -95,31 +100,27 @@ haslach_interactionmatrix = create_interaction_matrix(
      )
  # Creating interaction matrix

- interaction_matrix = haslach_interactionmatrix.transport_costs(
+ haslach_interactionmatrix.transport_costs(
      ors_auth="5b3ce3597851110001cf62480a15aafdb5a64f4d91805929f8af6abd"
-     #network=False
+     #network=False,
+     #distance_unit="meters",
+     # set network = True to calculate transport costs matrix via ORS API (default)
      )
  # Obtaining transport costs (default: driving-car)
  # ORS API documentation: https://openrouteservice.org/dev/#/api-docs/v2/

- interaction_matrix.summary()
+ haslach_interactionmatrix.summary()
  # Summary of interaction matrix

- print(interaction_matrix.hansen())
+ print(haslach_interactionmatrix.hansen())
  # Hansen accessibility for interaction matrix

- interaction_matrix = interaction_matrix.flows()
+ haslach_interactionmatrix.flows()
  # Calculating spatial flows for interaction matrix

- huff_model = interaction_matrix.marketareas()
- # Calculating total market areas for interaction matrix
- # Result of class HuffModel
-
- interaction_matrix = interaction_matrix.flows()
- # Calculating spatial flows
-
- huff_model = interaction_matrix.marketareas()
+ huff_model = haslach_interactionmatrix.marketareas()
  # Calculating total market areas
+ # Result of class HuffModel

  huff_model.summary()
  # Summary of Huff model
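Read together, the removed and added lines show that the 1.4.1 test script calls these methods directly on haslach_interactionmatrix instead of reassigning a returned interaction_matrix object, and the duplicated flows()/marketareas() calls are gone, which suggests the interaction matrix is now updated in place. A condensed sketch of the resulting call sequence, using the names from tests_huff.py (the ORS key is replaced by a placeholder):

    haslach_interactionmatrix.transport_costs(ors_auth="<your ORS API key>")
    # transport costs via the ORS API (default profile: driving-car)

    haslach_interactionmatrix.summary()
    print(haslach_interactionmatrix.hansen())   # Hansen accessibility
    haslach_interactionmatrix.flows()           # spatial flows

    huff_model = haslach_interactionmatrix.marketareas()
    # total market areas; result of class HuffModel
    huff_model.summary()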
@@ -127,9 +128,62 @@ huff_model.summary()
  print(huff_model.get_market_areas_df())
  # Showing total market areas

- print(interaction_matrix.get_interaction_matrix_df())
+
+ # Maximum Likelihood fit for Huff Model:
+
+ haslach_interactionmatrix.ml_fit(
+     #initial_params=[1, -2],
+     initial_params=[1, 9, -0.6],
+     method="trust-constr",
+     #bounds = [(0.8, 0.9999),(-2.5, -1.5)],
+     bounds = [(0.8, 0.9999),(7, 11),(-0.9, -0.4)],
+     )
+ # Maximum Likelihood fit for Huff Model
+
+ haslach_interactionmatrix.summary()
+ # Summary of ML-fitted interaction matrix (Huff model)
+
+ huff_model_fit = haslach_interactionmatrix.marketareas()
+ # Calculation of total market areas
+ # Result of class HuffModel
+
+ huff_model_fit.summary()
+ # Huff model summary
+
+
+ # Adding new supply location:
+
+ Haslach_new_supermarket = load_geodata(
+     "data/Haslach_new_supermarket.shp",
+     location_type="destinations",
+     unique_id="LFDNR"
+     )
+ # Loading new supply locations (shapefile)
+
+ Haslach_new_supermarket.summary()
+ # Summary of new supply locations data
+
+ Haslach_supermarkets.add_new_destinations(Haslach_new_supermarket)
+ # Adding new supermarket to existing supply locations
+
+ Haslach_supermarkets.summary()
+ # Summary of updated supply locations
+
+ huff_model.update()
+ # Update interaction matrix
+
+ huff_model.summary()
+ # Summary of updated interaction matrix
+
+ print(huff_model.get_market_areas_df())
+ # Showing total market areas of model with estimated parameters and new destination
+
+ print(huff_model.get_interaction_matrix_df())
  # Showing df of interaction matrix

+ huff_model.get_interaction_matrix_df().to_excel("interaction_matrix_df.xlsx")
+ # Export of interaction matrix
+

  # Multiplicative Competitive Interaction Model:
  # MCI analysis for the grocery store market areas in Freiburg (Germany)
@@ -176,10 +230,8 @@ Wieland2015_fit = Wieland2015_interaction_matrix.mci_fit(
      )
  # Fitting MCI model with four independent variables

- Wieland2015_fit.summary()
- # MCI model summary
-
  Wieland2015_fit.probabilities()
+ # Calculating probabilities

  Wieland2015_fit_interactionmatrix = Wieland2015_fit.get_interaction_matrix_df()
  # Export interaction matrix
@@ -246,30 +298,30 @@ map_with_basemap(
      Haslach_supermarkets_gdf
      ],
      styles={
-         0: {
+         0: {"name": "Isochrones",
              "color": {
                  "segm_min": {
-                     "3": "green",
-                     "6": "yellow",
-                     "9": "orange",
-                     "12": "red",
-                     "13": "darkred"
+                     "3": "midnightblue",
+                     "6": "blue",
+                     "9": "dodgerblue",
+                     "12": "deepskyblue",
+                     "15": "aqua"
                      }
                  },
              "alpha": 0.3
              },
-         1: {
-             "color": "green",
+         1: {"name": "Districts",
+             "color": "black",
              "alpha": 1
              },
-         2: {
+         2: {"name": "Supermarket chains",
              "color": {
                  "Name": {
                      "Aldi Süd": "blue",
                      "Edeka": "yellow",
                      "Lidl": "red",
                      "Netto": "orange",
-                     "Real": "black",
+                     "Real": "darkblue",
                      "Treff 3000": "fuchsia"
                      }
                  },
huff-1.3.5.dist-info/METADATA → huff-1.4.1.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: huff
- Version: 1.3.5
+ Version: 1.4.1
  Summary: huff: Huff Model Market Area Analysis
  Author: Thomas Wieland
  Author-email: geowieland@googlemail.com
@@ -34,6 +34,8 @@ See the /tests directory for usage examples of most of the included functions.
  - Defining origins and destinations with weightings
  - Creating interaction matrix from origins and destinations
  - Market simulation with basic Huff Model
+ - Different function types: power, exponential, logistic
+ - Huff model parameter estimation via Maximum Likelihood (ML)
  - **Multiplicative Competitive Interaction Model**:
  - Log-centering transformation of interaction matrix
  - Fitting MCI model with >= 2 independent variables
@@ -60,12 +62,16 @@ See the /tests directory for usage examples of most of the included functions.


  ## Literature
- - Huff DL (1962) *Determination of Intra-Urban Retail Trade Areas*.
+ - De Beule M, Van den Poel D, Van de Weghe N (2014) An extended Huff-model for robustly benchmarking and predicting retail network performance. *Applied Geography* 46(1): 80–89. [10.1016/j.apgeog.2013.09.026](https://doi.org/10.1016/j.apgeog.2013.09.026)
+ - Haines Jr GH, Simon LS, Alexis M (1972) Maximum Likelihood Estimation of Central-City Food Trading Areas. *Journal of Marketing Research* 9(2): 154-159. [10.2307/3149948](https://doi.org/10.2307/3149948)
+ - Huff DL (1962) *Determination of Intra-Urban Retail Trade Areas*. Real Estate Research Program, Graduate Schools of Business Administration, University of California.
+ - Huff DL (1963) A Probabilistic Analysis of Shopping Center Trade Areas. *Land Economics* 39(1): 81-90. [10.2307/3144521](https://doi.org/10.2307/3144521)
  - Huff DL (1964) Defining and estimating a trading area. *Journal of Marketing* 28(4): 34–38. [10.2307/1249154](https://doi.org/10.2307/1249154)
  - Huff DL, McCallum BM (2008) Calibrating the Huff Model using ArcGIS Business Analyst. ESRI White Paper, September 2008. https://www.esri.com/library/whitepapers/pdfs/calibrating-huff-model.pdf.
- - De Beule M, Van den Poel D, Van de Weghe N (2014) An extended Huff-model for robustly benchmarking and predicting retail network performance. *Applied Geography* 46(1): 80–89. [10.1016/j.apgeog.2013.09.026](https://doi.org/10.1016/j.apgeog.2013.09.026)
  - Nakanishi M, Cooper LG (1974) Parameter estimation for a Multiplicative Competitive Interaction Model: Least squares approach. *Journal of Marketing Research* 11(3): 303–311. [10.2307/3151146](https://doi.org/10.2307/3151146).
  - Nakanishi M, Cooper LG (1982) Technical Note — Simplified Estimation Procedures for MCI Models. *Marketing Science* 1(3): 314-322. [10.1287/mksc.1.3.314](https://doi.org/10.1287/mksc.1.3.314)
+ - Orpana T, Lampinen J (2003) Building Spatial Choice Models from Aggregate Data. *Journal of Regional Science* 43(2): 319-348. [10.1111/1467-9787.00301](https://doi.org/10.1111/1467-9787.00301)
+ - Wieland T (2015) *Nahversorgung im Kontext raumökonomischer Entwicklungen im Lebensmitteleinzelhandel: Konzeption und Durchführung einer GIS-gestützten Analyse der Strukturen des Lebensmitteleinzelhandels und der Nahversorgung in Freiburg im Breisgau*. Working paper. Göttingen. https://webdoc.sub.gwdg.de/pub/mon/2015/5-wieland.pdf.
  - Wieland T (2017) Market Area Analysis for Retail and Service Locations with MCI. *R Journal* 9(1): 298-323. [10.32614/RJ-2017-020](https://doi.org/10.32614/RJ-2017-020)
  - Wieland T (2018) A Hurdle Model Approach of Store Choice and Market Area Analysis in Grocery Retailing. *Papers in Applied Geography* 4(4): 370-389. [10.1080/23754931.2018.1519458](https://doi.org/10.1080/23754931.2018.1519458)
  - Wieland T (2023) Spatial shopping behavior during the Corona pandemic: insights from a micro-econometric store choice model for consumer electronics and furniture retailing in Germany. *Journal of Geographical Systems* 25(2): 291–326. [10.1007/s10109-023-00408-x](https://doi.org/10.1007/s10109-023-00408-x)
huff-1.3.5.dist-info/RECORD → huff-1.4.1.dist-info/RECORD CHANGED
@@ -1,16 +1,22 @@
  huff/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- huff/gistools.py,sha256=jvS1IOi1cIO4X1i8n3NeyGlufaobBtAb0nI8aNs1jrk,6857
- huff/models.py,sha256=T16UellqoDuEKcAq96Cn_TzEqihv9USIGivCOnbLZmw,66576
- huff/ors.py,sha256=_qMBsgoEzZMbagxcF3sUjQ-jFZrNjuNF23v8mlmhbZI,11929
- huff/osm.py,sha256=K_acRNHlKTOHVmMX-bHbA2_06QwfPLXpYKJcn03_gik,6862
+ huff/gistools.py,sha256=fgeE1IsUO7UIaawb23kuiz_Rlxn7T18iLLTA5yvgp74,7038
+ huff/models.py,sha256=3IxZLUp8-sC-sy0qJ677-cYEi09cqNOuOw_QBvr-K5s,89975
+ huff/ors.py,sha256=JlO2UEishQX87PIiktksOrVT5QdB-GEWgjXcxoR_KuA,11929
+ huff/osm.py,sha256=9A-7hxeZyjA2r8w2_IqqwH14qq2Y9AS1GxVKOD7utqs,7747
  huff/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- huff/tests/tests_huff.py,sha256=v5ockdMFpq4C2hS6l3VAuWPtsKhWYI8l5AJC3wo-xZE,7795
+ huff/tests/tests_huff.py,sha256=nnOcZmhvEQMsnCf7YKnm-2vAY_h7FA7p7E2UPBDXLRU,9435
  huff/tests/data/Haslach.cpg,sha256=OtMDH1UDpEBK-CUmLugjLMBNTqZoPULF3QovKiesmCQ,5
  huff/tests/data/Haslach.dbf,sha256=GVPIt05OzDO7UrRDcsMhiYWvyXAPg6Z-qkiysFzj-fc,506
  huff/tests/data/Haslach.prj,sha256=2Jy1Vlzh7UxQ1MXpZ9UYLs2SxfrObj2xkEkZyLqmGTY,437
  huff/tests/data/Haslach.qmd,sha256=JlcOYzG4vI1NH1IuOpxwIPnJsCyC-pDRAI00TzEvNf0,2522
  huff/tests/data/Haslach.shp,sha256=s7ks-ukOIKMJCD5x6m0MO6pwkg1USvhudQKTg74ib1E,212
  huff/tests/data/Haslach.shx,sha256=VEMghRPP_HUYIuGoxR7X0eHQe9LnO4s8JP4twfzKyyk,132
+ huff/tests/data/Haslach_new_supermarket.cpg,sha256=OtMDH1UDpEBK-CUmLugjLMBNTqZoPULF3QovKiesmCQ,5
+ huff/tests/data/Haslach_new_supermarket.dbf,sha256=QFO7fq2F2dSOh-JEXF8lq_vZKfaPzCHNGCUtSkixbD4,3054
+ huff/tests/data/Haslach_new_supermarket.prj,sha256=2Jy1Vlzh7UxQ1MXpZ9UYLs2SxfrObj2xkEkZyLqmGTY,437
+ huff/tests/data/Haslach_new_supermarket.qmd,sha256=Yo2TZyuelYj3Uc00_Wa5a8ZfA-GeyRA1PsODQS9tRPg,666
+ huff/tests/data/Haslach_new_supermarket.shp,sha256=RYyoQPM-W-5Ifm9PV2H6oghhm954BMmZzZ9Q_WlUOPQ,128
+ huff/tests/data/Haslach_new_supermarket.shx,sha256=wSYoIeoHrdsiLqVPxRPA5ZBy7IQ7lx1BoXjV9fXEP9w,108
  huff/tests/data/Haslach_supermarkets.cpg,sha256=OtMDH1UDpEBK-CUmLugjLMBNTqZoPULF3QovKiesmCQ,5
  huff/tests/data/Haslach_supermarkets.dbf,sha256=4fTBxntDvQ8qFPdGK82ywJd2Xq_9nApDyi3h5_KPFSc,21282
  huff/tests/data/Haslach_supermarkets.prj,sha256=2Jy1Vlzh7UxQ1MXpZ9UYLs2SxfrObj2xkEkZyLqmGTY,437
@@ -18,7 +24,7 @@ huff/tests/data/Haslach_supermarkets.qmd,sha256=JlcOYzG4vI1NH1IuOpxwIPnJsCyC-pDR
  huff/tests/data/Haslach_supermarkets.shp,sha256=X7QbQ0BTMag_B-bDRbpr-go2BQIXo3Y8zMAKpYZmlps,324
  huff/tests/data/Haslach_supermarkets.shx,sha256=j23QHX-SmdAeN04rw0x8nUOran-OCg_T6r_LvzzEPWs,164
  huff/tests/data/Wieland2015.xlsx,sha256=SaVM-Hi5dBTmf2bzszMnZ2Ec8NUE05S_5F2lQj0ayS0,19641
- huff-1.3.5.dist-info/METADATA,sha256=tj7dFjaEWkaa3eLHLTu8QDb51L5Nlv1bKO-mUSra1Cg,4558
- huff-1.3.5.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
- huff-1.3.5.dist-info/top_level.txt,sha256=nlzX-PxZNFmIxANIJMySuIFPihd6qOBkRlhIC28NEsQ,5
- huff-1.3.5.dist-info/RECORD,,
+ huff-1.4.1.dist-info/METADATA,sha256=TMOldW_srTquKEghHkuMKyofG2MjUMUV4OKfdNUyFoU,5692
+ huff-1.4.1.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
+ huff-1.4.1.dist-info/top_level.txt,sha256=nlzX-PxZNFmIxANIJMySuIFPihd6qOBkRlhIC28NEsQ,5
+ huff-1.4.1.dist-info/RECORD,,