voxcity 0.5.11__py3-none-any.whl → 0.5.13__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of voxcity might be problematic.
- voxcity/downloader/osm.py +954 -646
- voxcity/simulator/solar.py +1422 -1371
- voxcity/simulator/view.py +16 -15
- voxcity/utils/visualization.py +2 -2
- {voxcity-0.5.11.dist-info → voxcity-0.5.13.dist-info}/METADATA +2 -3
- {voxcity-0.5.11.dist-info → voxcity-0.5.13.dist-info}/RECORD +10 -10
- {voxcity-0.5.11.dist-info → voxcity-0.5.13.dist-info}/WHEEL +1 -1
- {voxcity-0.5.11.dist-info → voxcity-0.5.13.dist-info}/licenses/AUTHORS.rst +0 -0
- {voxcity-0.5.11.dist-info → voxcity-0.5.13.dist-info}/licenses/LICENSE +0 -0
- {voxcity-0.5.11.dist-info → voxcity-0.5.13.dist-info}/top_level.txt +0 -0
voxcity/downloader/osm.py
CHANGED
@@ -1,647 +1,955 @@
- (646 lines removed: the previous osm.py, which opened with the same module docstring and requests/shapely imports, was rewritten in full in this release)
+ New contents of voxcity/downloader/osm.py (lines 1-955, all added):

"""
Module for downloading and processing OpenStreetMap data.

This module provides functionality to download and process building footprints, land cover,
and other geographic features from OpenStreetMap. It handles downloading data via the Overpass API,
processing the responses, and converting them to standardized GeoJSON format with proper properties.
"""

import requests
from shapely.geometry import Polygon, shape, mapping
from shapely.ops import transform
import pyproj
from collections import defaultdict
import requests
import json
from shapely.geometry import shape, mapping, Polygon, LineString, Point, MultiPolygon
from shapely.ops import transform
import pyproj
import pandas as pd
import geopandas as gpd

def osm_json_to_geojson(osm_data):
    """
    Convert OSM JSON data to GeoJSON format with proper handling of complex relations.

    Args:
        osm_data (dict): OSM JSON data from Overpass API

    Returns:
        dict: GeoJSON FeatureCollection
    """
    features = []

    # Create a mapping of node IDs to their coordinates
    nodes = {}
    ways = {}

    # First pass: index all nodes and ways
    for element in osm_data['elements']:
        if element['type'] == 'node':
            nodes[element['id']] = (element['lon'], element['lat'])
        elif element['type'] == 'way':
            ways[element['id']] = element

    # Second pass: generate features
    for element in osm_data['elements']:
        if element['type'] == 'node' and 'tags' in element and element['tags']:
            # Convert POI nodes to Point features
            feature = {
                'type': 'Feature',
                'properties': {
                    'id': element['id'],
                    'type': 'node',
                    'tags': element.get('tags', {})
                },
                'geometry': {
                    'type': 'Point',
                    'coordinates': [element['lon'], element['lat']]
                }
            }
            features.append(feature)

        elif element['type'] == 'way' and 'nodes' in element:
            # Skip ways that are part of relations - we'll handle those in relation processing
            if is_part_of_relation(element['id'], osm_data):
                continue

            # Process standalone way
            coords = get_way_coords(element, nodes)
            if not coords or len(coords) < 2:
                continue

            # Determine if it's a polygon or a line
            is_polygon = is_way_polygon(element)

            # Make sure polygons have valid geometry (closed loop with at least 4 points)
            if is_polygon:
                # For closed ways, make sure first and last coordinates are the same
                if coords[0] != coords[-1]:
                    coords.append(coords[0])

                # Check if we have enough coordinates for a valid polygon (at least 4)
                if len(coords) < 4:
                    # Not enough coordinates for a polygon, convert to LineString
                    is_polygon = False

            feature = {
                'type': 'Feature',
                'properties': {
                    'id': element['id'],
                    'type': 'way',
                    'tags': element.get('tags', {})
                },
                'geometry': {
                    'type': 'Polygon' if is_polygon else 'LineString',
                    'coordinates': [coords] if is_polygon else coords
                }
            }
            features.append(feature)

        elif element['type'] == 'relation' and 'members' in element and 'tags' in element:
            tags = element.get('tags', {})

            # Process multipolygon relations
            if tags.get('type') == 'multipolygon' or any(key in tags for key in ['natural', 'water', 'waterway']):
                # Group member ways by role
                members_by_role = {'outer': [], 'inner': []}

                for member in element['members']:
                    if member['type'] == 'way' and member['ref'] in ways:
                        role = member['role']
                        if role not in ['outer', 'inner']:
                            role = 'outer'  # Default to outer if role not specified
                        members_by_role[role].append(member['ref'])

                # Skip if no outer members
                if not members_by_role['outer']:
                    continue

                # Create rings from member ways
                outer_rings = create_rings_from_ways(members_by_role['outer'], ways, nodes)
                inner_rings = create_rings_from_ways(members_by_role['inner'], ways, nodes)

                # Skip if no valid outer rings
                if not outer_rings:
                    continue

                # Create feature based on number of outer rings
                if len(outer_rings) == 1:
                    # Single polygon with possible inner rings
                    feature = {
                        'type': 'Feature',
                        'properties': {
                            'id': element['id'],
                            'type': 'relation',
                            'tags': tags
                        },
                        'geometry': {
                            'type': 'Polygon',
                            'coordinates': [outer_rings[0]] + inner_rings
                        }
                    }
                else:
                    # MultiPolygon
                    # Each outer ring forms a polygon, and we assign inner rings to each polygon
                    # This is a simplification - proper assignment would check for containment
                    multipolygon_coords = []
                    for outer_ring in outer_rings:
                        polygon_coords = [outer_ring]
                        # For simplicity, assign all inner rings to the first polygon
                        # A more accurate implementation would check which outer ring contains each inner ring
                        if len(multipolygon_coords) == 0:
                            polygon_coords.extend(inner_rings)
                        multipolygon_coords.append(polygon_coords)

                    feature = {
                        'type': 'Feature',
                        'properties': {
                            'id': element['id'],
                            'type': 'relation',
                            'tags': tags
                        },
                        'geometry': {
                            'type': 'MultiPolygon',
                            'coordinates': multipolygon_coords
                        }
                    }

                features.append(feature)

    return {
        'type': 'FeatureCollection',
        'features': features
    }

def is_part_of_relation(way_id, osm_data):
    """Check if a way is part of any relation."""
    for element in osm_data['elements']:
        if element['type'] == 'relation' and 'members' in element:
            for member in element['members']:
                if member['type'] == 'way' and member['ref'] == way_id:
                    return True
    return False

def is_way_polygon(way):
    """Determine if a way should be treated as a polygon."""
    # Check if the way is closed (first and last nodes are the same)
    if 'nodes' in way and way['nodes'][0] == way['nodes'][-1]:
        # Check for tags that indicate this is an area
        if 'tags' in way:
            tags = way['tags']
            if 'building' in tags or ('area' in tags and tags['area'] == 'yes'):
                return True
            if any(k in tags for k in ['landuse', 'natural', 'water', 'leisure', 'amenity']):
                return True
    return False

def get_way_coords(way, nodes):
    """Get coordinates for a way."""
    coords = []
    if 'nodes' not in way:
        return coords

    for node_id in way['nodes']:
        if node_id in nodes:
            coords.append(nodes[node_id])
        else:
            # Missing node - skip this way
            return []

    return coords

def create_rings_from_ways(way_ids, ways, nodes):
    """
    Create continuous rings by connecting ways.

    Args:
        way_ids: List of way IDs that make up the ring(s)
        ways: Dictionary mapping way IDs to way elements
        nodes: Dictionary mapping node IDs to coordinates

    Returns:
        List of rings, where each ring is a list of coordinates
    """
    if not way_ids:
        return []

    # Extract node IDs for each way
    way_nodes = {}
    for way_id in way_ids:
        if way_id in ways and 'nodes' in ways[way_id]:
            way_nodes[way_id] = ways[way_id]['nodes']

    # If we have no valid ways, return empty list
    if not way_nodes:
        return []

    # Connect the ways to form rings
    rings = []
    unused_ways = set(way_nodes.keys())

    while unused_ways:
        # Start a new ring with the first unused way
        current_way_id = next(iter(unused_ways))
        unused_ways.remove(current_way_id)

        # Get the first and last node IDs of the current way
        current_nodes = way_nodes[current_way_id]
        if not current_nodes:
            continue

        # Start building a ring with the nodes of the first way
        ring_nodes = list(current_nodes)

        # Try to connect more ways to complete the ring
        connected = True
        while connected and unused_ways:
            connected = False

            # Get the first and last nodes of the current ring
            first_node = ring_nodes[0]
            last_node = ring_nodes[-1]

            # Try to find a way that connects to either end of our ring
            for way_id in list(unused_ways):
                nodes_in_way = way_nodes[way_id]
                if not nodes_in_way:
                    unused_ways.remove(way_id)
                    continue

                # Check if this way connects at the start of our ring
                if nodes_in_way[-1] == first_node:
                    # This way connects to the start of our ring (reversed)
                    ring_nodes = nodes_in_way[:-1] + ring_nodes
                    unused_ways.remove(way_id)
                    connected = True
                    break
                elif nodes_in_way[0] == first_node:
                    # This way connects to the start of our ring
                    ring_nodes = list(reversed(nodes_in_way))[:-1] + ring_nodes
                    unused_ways.remove(way_id)
                    connected = True
                    break
                # Check if this way connects at the end of our ring
                elif nodes_in_way[0] == last_node:
                    # This way connects to the end of our ring
                    ring_nodes.extend(nodes_in_way[1:])
                    unused_ways.remove(way_id)
                    connected = True
                    break
                elif nodes_in_way[-1] == last_node:
                    # This way connects to the end of our ring (reversed)
                    ring_nodes.extend(list(reversed(nodes_in_way))[1:])
                    unused_ways.remove(way_id)
                    connected = True
                    break

        # Check if the ring is closed (first node equals last node)
        if ring_nodes and ring_nodes[0] == ring_nodes[-1] and len(ring_nodes) >= 4:
            # Convert node IDs to coordinates
            ring_coords = []
            for node_id in ring_nodes:
                if node_id in nodes:
                    ring_coords.append(nodes[node_id])
                else:
                    # Missing node - skip this ring
                    ring_coords = []
                    break

            if ring_coords and len(ring_coords) >= 4:
                rings.append(ring_coords)
        else:
            # Try to close the ring if it's almost complete
            if ring_nodes and len(ring_nodes) >= 3 and ring_nodes[0] != ring_nodes[-1]:
                ring_nodes.append(ring_nodes[0])

                # Convert node IDs to coordinates
                ring_coords = []
                for node_id in ring_nodes:
                    if node_id in nodes:
                        ring_coords.append(nodes[node_id])
                    else:
                        # Missing node - skip this ring
                        ring_coords = []
                        break

                if ring_coords and len(ring_coords) >= 4:
                    rings.append(ring_coords)

    return rings

def load_gdf_from_openstreetmap(rectangle_vertices):
    """Download and process building footprint data from OpenStreetMap.

    Args:
        rectangle_vertices: List of (lon, lat) coordinates defining the bounding box

    Returns:
        geopandas.GeoDataFrame: GeoDataFrame containing building footprints with standardized properties
    """
    # Create a bounding box from the rectangle vertices
    min_lon = min(v[0] for v in rectangle_vertices)
    max_lon = max(v[0] for v in rectangle_vertices)
    min_lat = min(v[1] for v in rectangle_vertices)
    max_lat = max(v[1] for v in rectangle_vertices)

    # Enhanced Overpass API query with recursive member extraction
    overpass_url = "http://overpass-api.de/api/interpreter"
    overpass_query = f"""
    [out:json];
    (
      way["building"]({min_lat},{min_lon},{max_lat},{max_lon});
      way["building:part"]({min_lat},{min_lon},{max_lat},{max_lon});
      relation["building"]({min_lat},{min_lon},{max_lat},{max_lon});
      way["tourism"="artwork"]["area"="yes"]({min_lat},{min_lon},{max_lat},{max_lon});
      relation["tourism"="artwork"]["area"="yes"]({min_lat},{min_lon},{max_lat},{max_lon});
    );
    (._; >;); // Recursively get all nodes, ways, and relations within relations
    out geom;
    """

    # Send the request to the Overpass API
    response = requests.get(overpass_url, params={'data': overpass_query})
    data = response.json()

    # Build a mapping from (type, id) to element
    id_map = {}
    for element in data['elements']:
        id_map[(element['type'], element['id'])] = element

    # Process the response and create features list
    features = []

    def process_coordinates(geometry):
        """Helper function to process and reverse coordinate pairs.

        Args:
            geometry: List of coordinate pairs to process

        Returns:
            list: Processed coordinate pairs with reversed order
        """
        return [coord for coord in geometry]  # Keep original order since already (lon, lat)

    def get_height_from_properties(properties):
        """Helper function to extract height from properties.

        Args:
            properties: Dictionary of feature properties

        Returns:
            float: Extracted or calculated height value
        """
        height = properties.get('height', properties.get('building:height', None))
        if height is not None:
            try:
                return float(height)
            except ValueError:
                pass

        return 0  # Default height if no valid height found

    def extract_properties(element):
        """Helper function to extract and process properties from an element.

        Args:
            element: OSM element containing tags and properties

        Returns:
            dict: Processed properties dictionary
        """
        properties = element.get('tags', {})

        # Get height (now using the helper function)
        height = get_height_from_properties(properties)

        # Get min_height and min_level
        min_height = properties.get('min_height', '0')
        min_level = properties.get('building:min_level', properties.get('min_level', '0'))
        try:
            min_height = float(min_height)
        except ValueError:
            min_height = 0

        levels = properties.get('building:levels', properties.get('levels', None))
        try:
            levels = float(levels) if levels is not None else None
        except ValueError:
            levels = None

        # Extract additional properties, including those relevant to artworks
        extracted_props = {
            "id": element['id'],
            "height": height,
            "min_height": min_height,
            "confidence": -1.0,
            "is_inner": False,
            "levels": levels,
            "height_source": "explicit" if properties.get('height') or properties.get('building:height')
                             else "levels" if levels is not None
                             else "default",
            "min_level": min_level if min_level != '0' else None,
            "building": properties.get('building', 'no'),
            "building_part": properties.get('building:part', 'no'),
            "building_material": properties.get('building:material'),
            "building_colour": properties.get('building:colour'),
            "roof_shape": properties.get('roof:shape'),
            "roof_material": properties.get('roof:material'),
            "roof_angle": properties.get('roof:angle'),
            "roof_colour": properties.get('roof:colour'),
            "roof_direction": properties.get('roof:direction'),
            "architect": properties.get('architect'),
            "start_date": properties.get('start_date'),
            "name": properties.get('name'),
            "name:en": properties.get('name:en'),
            "name:es": properties.get('name:es'),
            "email": properties.get('email'),
            "phone": properties.get('phone'),
            "wheelchair": properties.get('wheelchair'),
            "tourism": properties.get('tourism'),
            "artwork_type": properties.get('artwork_type'),
            "area": properties.get('area'),
            "layer": properties.get('layer')
        }

        # Remove None values to keep the properties clean
        return {k: v for k, v in extracted_props.items() if v is not None}

    def create_polygon_feature(coords, properties, is_inner=False):
        """Helper function to create a polygon feature.

        Args:
            coords: List of coordinate pairs defining the polygon
            properties: Dictionary of feature properties
            is_inner: Boolean indicating if this is an inner ring

        Returns:
            dict: GeoJSON Feature object or None if invalid
        """
        if len(coords) >= 4:
            properties = properties.copy()
            properties["is_inner"] = is_inner
            return {
                "type": "Feature",
                "properties": properties,
                "geometry": {
                    "type": "Polygon",
                    "coordinates": [process_coordinates(coords)]
                }
            }
        return None

    # Process each element, handling relations and their way members
    for element in data['elements']:
        if element['type'] == 'way':
            if 'geometry' in element:
                coords = [(node['lon'], node['lat']) for node in element['geometry']]
                properties = extract_properties(element)
                feature = create_polygon_feature(coords, properties)
                if feature:
                    features.append(feature)

        elif element['type'] == 'relation':
            properties = extract_properties(element)

            # Process each member of the relation
            for member in element['members']:
                if member['type'] == 'way':
                    # Look up the way in id_map
                    way = id_map.get(('way', member['ref']))
                    if way and 'geometry' in way:
                        coords = [(node['lon'], node['lat']) for node in way['geometry']]
                        is_inner = member['role'] == 'inner'
                        member_properties = properties.copy()
                        member_properties['member_id'] = way['id']  # Include id of the way
                        feature = create_polygon_feature(coords, member_properties, is_inner)
                        if feature:
                            feature['properties']['role'] = member['role']
                            features.append(feature)

    # Convert features list to GeoDataFrame
    if not features:
        return gpd.GeoDataFrame()

    geometries = []
    properties_list = []

    for feature in features:
        geometries.append(shape(feature['geometry']))
        properties_list.append(feature['properties'])

    gdf = gpd.GeoDataFrame(properties_list, geometry=geometries, crs="EPSG:4326")
    return gdf

def convert_feature(feature):
    """Convert a GeoJSON feature to the desired format with height information.

    Args:
        feature (dict): Input GeoJSON feature

    Returns:
        dict: Converted feature with height and confidence values, or None if invalid
    """
    new_feature = {}
    new_feature['type'] = 'Feature'
    new_feature['properties'] = {}
    new_feature['geometry'] = {}

    # Convert geometry
    geometry = feature['geometry']
    geom_type = geometry['type']

    # Convert MultiPolygon to Polygon if necessary
    if geom_type == 'MultiPolygon':
        # Flatten MultiPolygon to Polygon by taking the first polygon
        # Alternatively, you can merge all polygons into one if needed
        coordinates = geometry['coordinates'][0]  # Take the first polygon
        if len(coordinates[0]) < 3:
            return None
    elif geom_type == 'Polygon':
        coordinates = geometry['coordinates']
        if len(coordinates[0]) < 3:
            return None
    else:
        # Skip features that are not polygons
        return None

    # Reformat coordinates: convert lists to tuples
    new_coordinates = []
    for ring in coordinates:
        new_ring = []
        for coord in ring:
            # Swap the order if needed (assuming original is [lat, lon])
            lat, lon = coord
            new_ring.append((lon, lat))  # Changed to (lon, lat)
        new_coordinates.append(new_ring)

    new_feature['geometry']['type'] = 'Polygon'
    new_feature['geometry']['coordinates'] = new_coordinates

    # Process properties
    properties = feature.get('properties', {})
    height = properties.get('height')

    # If height is not available, estimate it based on building levels
    if not height:
        levels = properties.get('building:levels')
        if levels:
            if type(levels)==str:
                # If levels is a string (invalid format), use default height
                height = 10.0  # Default height in meters
            else:
                # Calculate height based on number of levels
                height = float(levels) * 3.0  # Assume 3m per level
        else:
            # No level information available, use default height
            height = 10.0  # Default height in meters

    new_feature['properties']['height'] = float(height)
    new_feature['properties']['confidence'] = -1.0  # Confidence score for height estimate

    return new_feature


# Classification mapping defines the land cover/use classes and their associated tags
# The numbers (0-13) represent class codes used in the system
classification_mapping = {
    11: {'name': 'Road', 'tags': ['highway', 'road', 'path', 'track', 'street']},
    12: {'name': 'Building', 'tags': ['building', 'house', 'apartment', 'commercial_building', 'industrial_building']},
    10: {'name': 'Developed space', 'tags': ['industrial', 'retail', 'commercial', 'residential', 'construction', 'railway', 'parking', 'islet', 'island']},
    0: {'name': 'Bareland', 'tags': ['quarry', 'brownfield', 'bare_rock', 'scree', 'shingle', 'rock', 'sand', 'desert', 'landfill', 'beach']},
    1: {'name': 'Rangeland', 'tags': ['grass', 'meadow', 'grassland', 'heath', 'garden', 'park']},
    2: {'name': 'Shrub', 'tags': ['scrub', 'shrubland', 'bush', 'thicket']},
    3: {'name': 'Agriculture land', 'tags': ['farmland', 'orchard', 'vineyard', 'plant_nursery', 'greenhouse_horticulture', 'flowerbed', 'allotments', 'cropland']},
    4: {'name': 'Tree', 'tags': ['wood', 'forest', 'tree', 'tree_row', 'tree_canopy']},
    5: {'name': 'Moss and lichen', 'tags': ['moss', 'lichen', 'tundra_vegetation']},
    6: {'name': 'Wet land', 'tags': ['wetland', 'marsh', 'swamp', 'bog', 'fen', 'flooded_vegetation']},
    7: {'name': 'Mangrove', 'tags': ['mangrove', 'mangrove_forest', 'mangrove_swamp']},
    8: {'name': 'Water', 'tags': ['water', 'waterway', 'reservoir', 'basin', 'bay', 'ocean', 'sea', 'river', 'lake']},
    9: {'name': 'Snow and ice', 'tags': ['glacier', 'snow', 'ice', 'snowfield', 'ice_shelf']},
    13: {'name': 'No Data', 'tags': ['unknown', 'no_data', 'clouds', 'undefined']}
}

# Maps classification tags to specific OSM key-value pairs
# '*' means match any value for that key
tag_osm_key_value_mapping = {
    # Road
    'highway': {'highway': '*'},
    'road': {'highway': '*'},
    'path': {'highway': 'path'},
    'track': {'highway': 'track'},
    'street': {'highway': '*'},

    # Building
    'building': {'building': '*'},
    'house': {'building': 'house'},
    'apartment': {'building': 'apartments'},
    'commercial_building': {'building': 'commercial'},
    'industrial_building': {'building': 'industrial'},

    # Developed space
    'industrial': {'landuse': 'industrial'},
    'retail': {'landuse': 'retail'},
    'commercial': {'landuse': 'commercial'},
    'residential': {'landuse': 'residential'},
    'construction': {'landuse': 'construction'},
    'railway': {'landuse': 'railway'},
    'parking': {'amenity': 'parking'},
    'islet': {'place': 'islet'},
    'island': {'place': 'island'},

    # Bareland
    'quarry': {'landuse': 'quarry'},
    'brownfield': {'landuse': 'brownfield'},
    'bare_rock': {'natural': 'bare_rock'},
    'scree': {'natural': 'scree'},
    'shingle': {'natural': 'shingle'},
    'rock': {'natural': 'rock'},
    'sand': {'natural': 'sand'},
    'desert': {'natural': 'desert'},
    'landfill': {'landuse': 'landfill'},
    'beach': {'natural': 'beach'},

    # Rangeland
    'grass': {'landuse': 'grass'},
    'meadow': {'landuse': 'meadow'},
    'grassland': {'natural': 'grassland'},
    'heath': {'natural': 'heath'},
    'garden': {'leisure': 'garden'},
    'park': {'leisure': 'park'},

    # Shrub
    'scrub': {'natural': 'scrub'},
    'shrubland': {'natural': 'scrub'},
    'bush': {'natural': 'scrub'},
    'thicket': {'natural': 'scrub'},

    # Agriculture land
    'farmland': {'landuse': 'farmland'},
    'orchard': {'landuse': 'orchard'},
    'vineyard': {'landuse': 'vineyard'},
    'plant_nursery': {'landuse': 'plant_nursery'},
    'greenhouse_horticulture': {'landuse': 'greenhouse_horticulture'},
    'flowerbed': {'landuse': 'flowerbed'},
    'allotments': {'landuse': 'allotments'},
    'cropland': {'landuse': 'farmland'},

    # Tree
    'wood': {'natural': 'wood'},
    'forest': {'landuse': 'forest'},
    'tree': {'natural': 'tree'},
    'tree_row': {'natural': 'tree_row'},
    'tree_canopy': {'natural': 'tree_canopy'},

    # Moss and lichen
    'moss': {'natural': 'fell'},
    'lichen': {'natural': 'fell'},
    'tundra_vegetation': {'natural': 'fell'},

    # Wet land
    'wetland': {'natural': 'wetland'},
    'marsh': {'wetland': 'marsh'},
    'swamp': {'wetland': 'swamp'},
    'bog': {'wetland': 'bog'},
    'fen': {'wetland': 'fen'},
    'flooded_vegetation': {'natural': 'wetland'},

    # Mangrove
    'mangrove': {'natural': 'wetland', 'wetland': 'mangrove'},
    'mangrove_forest': {'natural': 'wetland', 'wetland': 'mangrove'},
    'mangrove_swamp': {'natural': 'wetland', 'wetland': 'mangrove'},

    # Water
    'water': {'natural': 'water'},
    'waterway': {'waterway': '*'},
    'reservoir': {'landuse': 'reservoir'},
    'basin': {'landuse': 'basin'},
    'bay': {'natural': 'bay'},
    'ocean': {'natural': 'water', 'water': 'ocean'},
    'sea': {'natural': 'water', 'water': 'sea'},
    'river': {'waterway': 'river'},
    'lake': {'natural': 'water', 'water': 'lake'},

    # Snow and ice
    'glacier': {'natural': 'glacier'},
    'snow': {'natural': 'glacier'},
    'ice': {'natural': 'glacier'},
    'snowfield': {'natural': 'glacier'},
    'ice_shelf': {'natural': 'glacier'},

    # No Data
    'unknown': {'FIXME': '*'},
    'no_data': {'FIXME': '*'},
    'clouds': {'natural': 'cloud'},
    'undefined': {'FIXME': '*'}
}

def get_classification(tags):
    """Determine the classification code and name for a feature based on its OSM tags.

    Args:
        tags (dict): Dictionary of OSM tags

    Returns:
        tuple: (classification_code, classification_name) or (None, None) if no match
    """
    # Iterate through each classification code and its associated info
    for code, info in classification_mapping.items():
        # Check each tag associated with this classification
        for tag in info['tags']:
            osm_mappings = tag_osm_key_value_mapping.get(tag)
            if osm_mappings:
                # Check if the feature's tags match any of the OSM key-value pairs
                for key, value in osm_mappings.items():
                    if key in tags:
                        if value == '*' or tags[key] == value:
                            return code, info['name']
            # Special case for islets and islands
            if tag in ['islet', 'island'] and tags.get('place') == tag:
                return code, info['name']
    # Special case for roads mapped as areas
    if 'area:highway' in tags:
        return 11, 'Road'
    return None, None

def swap_coordinates(geom_mapping):
    """Swap coordinates from (lon, lat) to (lat, lon) order.

    Args:
        geom_mapping (dict): GeoJSON geometry object

    Returns:
        dict: Geometry with swapped coordinates
    """
    coords = geom_mapping['coordinates']

    def swap_coords(coord_list):
        # Recursively swap coordinates for nested lists
        if isinstance(coord_list[0], (list, tuple)):
            return [swap_coords(c) for c in coord_list]
        else:
            # Keep original order since already (lon, lat)
            return coord_list

    geom_mapping['coordinates'] = swap_coords(coords)
    return geom_mapping

def load_land_cover_gdf_from_osm(rectangle_vertices_ori):
    """Load land cover data from OpenStreetMap within a given rectangular area.

    Args:
        rectangle_vertices_ori (list): List of (lon, lat) coordinates defining the rectangle

    Returns:
        GeoDataFrame: GeoDataFrame containing land cover classifications
    """
    # Close the rectangle polygon by adding first vertex at the end
    rectangle_vertices = rectangle_vertices_ori.copy()
    rectangle_vertices.append(rectangle_vertices_ori[0])

    # Instead of using poly:"lat lon lat lon...", use area coordinates
    min_lat = min(lat for lon, lat in rectangle_vertices)
    max_lat = max(lat for lon, lat in rectangle_vertices)
    min_lon = min(lon for lon, lat in rectangle_vertices)
    max_lon = max(lon for lon, lat in rectangle_vertices)

    # Initialize dictionary to store OSM keys and their allowed values
    osm_keys_values = defaultdict(list)

    # Build mapping of OSM keys to their possible values from classification mapping
    for info in classification_mapping.values():
        tags = info['tags']
        for tag in tags:
            osm_mappings = tag_osm_key_value_mapping.get(tag)
            if osm_mappings:
                for key, value in osm_mappings.items():
                    if value == '*':
                        osm_keys_values[key] = ['*']  # Match all values
                    else:
                        if osm_keys_values[key] != ['*'] and value not in osm_keys_values[key]:
                            osm_keys_values[key].append(value)

    # Build Overpass API query parts for each key-value pair
    query_parts = []
    for key, values in osm_keys_values.items():
        if values:
            if values == ['*']:
                # Query for any value of this key using bounding box
                query_parts.append(f'way["{key}"]({min_lat},{min_lon},{max_lat},{max_lon});')
                query_parts.append(f'relation["{key}"]({min_lat},{min_lon},{max_lat},{max_lon});')
            else:
                # Remove duplicate values
                values = list(set(values))
                # Build regex pattern for specific values
                values_regex = '|'.join(values)
                query_parts.append(f'way["{key}"~"^{values_regex}$"]({min_lat},{min_lon},{max_lat},{max_lon});')
                query_parts.append(f'relation["{key}"~"^{values_regex}$"]({min_lat},{min_lon},{max_lat},{max_lon});')

    # Combine query parts into complete Overpass query
    query_body = "\n ".join(query_parts)
    query = (
        "[out:json];\n"
        "(\n"
        f" {query_body}\n"
        ");\n"
        "out body;\n"
        ">;\n"
        "out skel qt;"
    )

    # Overpass API endpoint
    overpass_url = "http://overpass-api.de/api/interpreter"

    # Fetch data from Overpass API
    print("Fetching data from Overpass API...")
    response = requests.get(overpass_url, params={'data': query})
    response.raise_for_status()
    data = response.json()

    # Convert OSM data to GeoJSON format using our custom converter instead of json2geojson
    print("Converting data to GeoJSON format...")
    geojson_data = osm_json_to_geojson(data)

    # Create shapely polygon from rectangle vertices (in lon,lat order)
    rectangle_polygon = Polygon(rectangle_vertices)

    # Calculate center point for projection
    center_lat = sum(lat for lon, lat in rectangle_vertices) / len(rectangle_vertices)
    center_lon = sum(lon for lon, lat in rectangle_vertices) / len(rectangle_vertices)

    # Set up coordinate reference systems for projection
    wgs84 = pyproj.CRS('EPSG:4326')  # Standard lat/lon
    # Albers Equal Area projection centered on area of interest
    aea = pyproj.CRS(proj='aea', lat_1=rectangle_polygon.bounds[1], lat_2=rectangle_polygon.bounds[3], lat_0=center_lat, lon_0=center_lon)

    # Create transformers for projecting coordinates
    project = pyproj.Transformer.from_crs(wgs84, aea, always_xy=True).transform
    project_back = pyproj.Transformer.from_crs(aea, wgs84, always_xy=True).transform

    # Lists to store geometries and properties for GeoDataFrame
    geometries = []
    properties = []

    for feature in geojson_data['features']:
        # Convert feature geometry to shapely object
        geom = shape(feature['geometry'])
        if not (geom.is_valid and geom.intersects(rectangle_polygon)):
            continue

        # Get classification for feature
        tags = feature['properties'].get('tags', {})
        classification_code, classification_name = get_classification(tags)
        if classification_code is None:
            continue

        # Special handling for roads
        if classification_code == 11:
            highway_value = tags.get('highway', '')
            # Skip minor paths and walkways
            if highway_value in ['footway', 'path', 'pedestrian', 'steps', 'cycleway', 'bridleway']:
                continue

            # Determine road width for buffering
            width_value = tags.get('width')
            lanes_value = tags.get('lanes')
            buffer_distance = None

            # Calculate buffer distance based on width or number of lanes
            if width_value is not None:
                try:
                    width_meters = float(width_value)
                    buffer_distance = width_meters / 2
                except ValueError:
                    pass
            elif lanes_value is not None:
                try:
                    num_lanes = float(lanes_value)
                    width_meters = num_lanes * 3.0  # 3m per lane
                    buffer_distance = width_meters / 2
                except ValueError:
                    pass
            else:
                # Default road width
                buffer_distance = 2.5  # 5m total width

            if buffer_distance is None:
                continue

            # Buffer line features to create polygons
            if geom.geom_type in ['LineString', 'MultiLineString']:
                # Project to planar CRS, buffer, and project back
                geom_proj = transform(project, geom)
                buffered_geom_proj = geom_proj.buffer(buffer_distance)
                buffered_geom = transform(project_back, buffered_geom_proj)
                # Clip to rectangle
                geom = buffered_geom.intersection(rectangle_polygon)
            else:
                continue

        # Skip empty geometries
        if geom.is_empty:
            continue

        # Add geometries and properties
        if geom.geom_type == 'Polygon':
            geometries.append(geom)
            properties.append({'class': classification_name})
        elif geom.geom_type == 'MultiPolygon':
            for poly in geom.geoms:
                geometries.append(poly)
                properties.append({'class': classification_name})

    # Create GeoDataFrame
    gdf = gpd.GeoDataFrame(properties, geometry=geometries, crs="EPSG:4326")
    return gdf
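
For orientation, here is a minimal usage sketch of the two public loaders in the new module. It is based only on the signatures and docstrings shown above; the bounding-box coordinates are illustrative placeholders, and both calls query the live Overpass API, so results depend on current OSM data and network access.

from voxcity.downloader.osm import load_gdf_from_openstreetmap, load_land_cover_gdf_from_osm

# Rectangle vertices in (lon, lat) order, as the docstrings describe (values are illustrative)
rectangle_vertices = [
    (139.750, 35.680),
    (139.760, 35.680),
    (139.760, 35.690),
    (139.750, 35.690),
]

# Building footprints with height, min_height, and other OSM-derived properties
buildings_gdf = load_gdf_from_openstreetmap(rectangle_vertices)

# Land cover polygons labelled with a 'class' column (Road, Building, Water, ...)
landcover_gdf = load_land_cover_gdf_from_osm(rectangle_vertices)

print(buildings_gdf.head())
print(landcover_gdf.head())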
|