wolfhece 2.1.25__py3-none-any.whl → 2.1.27__py3-none-any.whl
This diff compares the contents of two publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in the public registry.
- wolfhece/PyPalette.py +26 -10
- wolfhece/acceptability/Parallels.py +44 -19
- wolfhece/acceptability/acceptability.py +187 -130
- wolfhece/acceptability/func.py +538 -169
- wolfhece/apps/version.py +1 -1
- wolfhece/libs/WolfOGL.c +16164 -2680
- wolfhece/libs/WolfOGL.pyx +357 -0
- wolfhece/libs/wolfogl.cp310-win_amd64.pyd +0 -0
- wolfhece/pyviews.py +3 -3
- wolfhece/wolf_array.py +394 -157
- wolfhece/wolfresults_2D.py +2 -2
- {wolfhece-2.1.25.dist-info → wolfhece-2.1.27.dist-info}/METADATA +2 -1
- {wolfhece-2.1.25.dist-info → wolfhece-2.1.27.dist-info}/RECORD +16 -18
- wolfhece/libs/wolfogl.cp39-win_amd64.pyd +0 -0
- wolfhece/libs/wolfpy.cp39-win_amd64.pyd +0 -0
- {wolfhece-2.1.25.dist-info → wolfhece-2.1.27.dist-info}/WHEEL +0 -0
- {wolfhece-2.1.25.dist-info → wolfhece-2.1.27.dist-info}/entry_points.txt +0 -0
- {wolfhece-2.1.25.dist-info → wolfhece-2.1.27.dist-info}/top_level.txt +0 -0
wolfhece/acceptability/func.py
CHANGED
@@ -7,6 +7,43 @@ import glob
 from pathlib import Path
 import logging
 from tqdm import tqdm
+from pyogrio import list_layers, read_dataframe
+from enum import Enum
+import numba as nb
+from numba import cuda
+
+ENGINE = 'pyogrio' # or 'Fiona -- Pyogrio is faster
+EXTENT = '.gpkg'
+class Modif_Type(Enum):
+    """
+    Enum class for the type of modification
+    """
+
+    WALOUS = 'Walous layers changed to PICC buidings'
+    POINT2POLY_EPURATION = 'Change BDREF based on AJOUT_PDET sent by Perrine (SPI)'
+    POINT2POLY_PICC = 'Convert the points to polygons based on PICC'
+    POINT2POLY_CAPAPICC = 'Convert the points to polygons based on PICC and CaPa'
+    INHABITED = 'Select only inhabited buildings'
+    ROAD = 'Create a buffer around the roads'
+    COPY = 'Copy the data'
+
+class Vulnerability_csv():
+
+    def __init__(self, file:Path) -> None:
+        self.file = file
+        self.data = pd.read_csv(file, sep=",", encoding='latin-1')
+
+    def get_layers(self) -> list:
+        return [a[1] for a in self.data["Path"].str.split('/')]
+
+    def get_vulnerability_level(self, layer:str) -> str:
+        idx = self.get_layers().index(layer)
+        return self.data.iloc[idx]["Vulne"]
+
+    def get_vulnerability_code(self, layer:str) -> str:
+        idx = self.get_layers().index(layer)
+        return self.data.iloc[idx]["Code"]
+
 
 def get_data_type(fname:Path):
 
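Note: the new `Vulnerability_csv` helper wraps the `Vulnerability.csv` lookup table. A minimal usage sketch — not part of the diff; the CSV path and the layer name are hypothetical, and the CSV layout (a `Path` column of `GDB/layer` strings plus `Vulne` and `Code` columns) is what the class itself assumes:

```python
from pathlib import Path

# Hypothetical call site for the Vulnerability_csv class added above
vuln_csv = Vulnerability_csv(Path("INPUT/CSVs/Vulnerability.csv"))

print(vuln_csv.get_layers())                              # second part of each "Path" entry
print(vuln_csv.get_vulnerability_level("Hab_2018_CABU"))  # "Vulne" column for that layer
print(vuln_csv.get_vulnerability_code("Hab_2018_CABU"))   # "Code" column for that layer
```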
@@ -97,6 +134,7 @@ class Accept_Manager():
                  CaPa_Walloon:str = 'Cadastre_Walloon.gpkg',
                  PICC_Walloon:str = 'PICC_vDIFF.gdb',
                  CE_IGN_top10v:str = 'CE_IGN_TOP10V/CE_IGN_TOP10V.shp',
+                 EPU_Stations:str = 'AJOUT_PDET_EPU_DG03_STATIONS.shp'
                 ) -> None:
 
         self.old_dir:Path = Path(os.getcwd())
@@ -123,11 +161,13 @@ class Accept_Manager():
         self.IN_STUDY_AREA = self.IN_DIR / "STUDY_AREA"
         self.IN_CSV = self.IN_DIR / "CSVs"
         self.IN_WATER_DEPTH = self.IN_DIR / "WATER_DEPTH"
+        self.IN_EPU_STATIONS= self.IN_DIR / "EPU_STATIONS_NEW"
 
         self.ORIGINAL_GDB = self.IN_DATABASE / self._original_gdb
         self.CAPA_WALLOON = self.IN_DATABASE / self._capa_walloon
         self.PICC_WALLOON = self.IN_DATABASE / self._picc_walloon
         self.CE_IGN_TOP10V = self.IN_DATABASE / self._ce_ign_top10v
+        self.EPU_STATIONS = self.IN_EPU_STATIONS / EPU_Stations
 
         self.VULNERABILITY_CSV = self.IN_CSV / "Vulnerability.csv"
         self.POINTS_CSV = self.IN_CSV / "Intermediate.csv"
@@ -136,26 +176,39 @@ class Accept_Manager():
         self._CSVs = [self.VULNERABILITY_CSV, self.POINTS_CSV] #, self.PONDERATION_CSV]
         self._GPKGs= [self.CAPA_WALLOON, self.PICC_WALLOON]
         self._GDBs = [self.ORIGINAL_GDB]
-        self._SHPs = [self.CE_IGN_TOP10V]
+        self._SHPs = [self.CE_IGN_TOP10V, self.EPU_STATIONS]
         self._ALLS = self._CSVs + self._GPKGs + self._GDBs + self._SHPs
 
         self.TMP_DIR = self.main_dir / "TEMP"
 
-        self.TMP_DATABASE = self.TMP_DIR / "DATABASES"
-
         self.OUT_DIR = self.main_dir / "OUTPUT"
 
+        self.points2polys = []
+        self.lines2polys = []
+
         self.create_paths()
         self.create_paths_scenario()
 
     def create_paths(self):
         """ Create the paths for the directories and files """
 
+        self.points2polys = []
+        self.lines2polys = []
+
         if self._study_area is not None:
 
             self.Study_area:Path = Path(self._study_area)
 
-            self.TMP_STUDYAREA = self.
+            self.TMP_STUDYAREA = self.TMP_DIR / self.Study_area.stem
+            self.TMP_DATABASE = self.TMP_STUDYAREA / "DATABASES"
+
+            self.TMP_CLIPGDB = self.TMP_DATABASE / "CLIP_GDB"
+            self.TMP_CADASTER = self.TMP_DATABASE / "CLIP_CADASTER"
+            self.TMP_PICC = self.TMP_DATABASE / "CLIP_PICC"
+            self.TMP_IGNCE = self.TMP_DATABASE / "CLIP_IGN_CE"
+            self.TMP_WMODIF = self.TMP_DATABASE / "WITH_MODIF"
+            self.TMP_CODEVULNE = self.TMP_DATABASE / "CODE_VULNE"
+
             self.TMP_VULN_DIR = self.TMP_STUDYAREA / "VULNERABILITY"
             self.TMP_RASTERS = self.TMP_VULN_DIR / "RASTERS"
             self.TMP_RASTERS_CODE = self.TMP_RASTERS / "Code"
@@ -164,12 +217,13 @@ class Accept_Manager():
             self.OUT_STUDY_AREA = self.OUT_DIR / self.Study_area.stem
 
             self.SA = self.IN_STUDY_AREA / self.Study_area
-
-            self.
-            self.
+
+            # self.SA_DATABASE = self.TMP_STUDYAREA / "database.gpkg"
+            # self.SA_CAPA = self.TMP_STUDYAREA / "CaPa.gpkg"
+            # self.SA_PICC = self.TMP_STUDYAREA / "PICC.gpkg"
             self.SA_FINAL = self.TMP_STUDYAREA / "database_final.gpkg"
             self.SA_FINAL_V = self.TMP_STUDYAREA / "database_final_V.gpkg"
-            self.SA_MASKED_RIVER = self.
+            self.SA_MASKED_RIVER = self.TMP_IGNCE / "CE_IGN_TOP10V.tiff"
 
             self.SA_VULN = self.TMP_VULN_DIR / "Vulnerability.tiff"
             self.SA_CODE = self.TMP_VULN_DIR / "Vulnerability_Code.tiff"
@@ -179,6 +233,12 @@ class Accept_Manager():
             self._scenario = None
 
             self.TMP_STUDYAREA = None
+            self.TMP_DATABASE = None
+            self.TMP_CADASTER = None
+            self.TMP_PICC = None
+            self.TMP_IGNCE = None
+            self.TMP_WMODIF = None
+            self.TMP_CODEVULNE = None
             self.TMP_VULN_DIR = None
             self.TMP_RASTERS = None
             self.TMP_RASTERS_CODE = None
@@ -204,7 +264,7 @@ class Accept_Manager():
         self.check_outputs()
 
     def create_paths_scenario(self):
-
+
         if self._scenario is not None:
 
             self.scenario:str = str(self._scenario)
@@ -300,10 +360,25 @@ class Accept_Manager():
             logging.error("The scenario does not exist in the water depth directory")
 
     def get_files_in_rm_buildings(self) -> list[Path]:
-        return [Path(a) for a in glob.glob(str(self.IN_RM_BUILD_DIR / "
+        return [Path(a) for a in glob.glob(str(self.IN_RM_BUILD_DIR / ("*"+ EXTENT)))]
 
     def get_files_in_rasters_vulne(self) -> list[Path]:
         return [Path(a) for a in glob.glob(str(self.TMP_RASTERS_VULNE / "*.tiff"))]
+
+    def get_layers_in_gdb(self) -> list[str]:
+        return [a[0] for a in list_layers(str(self.ORIGINAL_GDB))]
+
+    def get_layer_types_in_gdb(self) -> list[str]:
+        return [a[1] for a in list_layers(str(self.ORIGINAL_GDB))]
+
+    def get_layers_in_clipgdb(self) -> list[str]:
+        return [Path(a).stem for a in glob.glob(str(self.TMP_CLIPGDB / ("*"+ EXTENT)))]
+
+    def get_layers_in_wmodif(self) -> list[str]:
+        return [Path(a).stem for a in glob.glob(str(self.TMP_WMODIF / ("*"+ EXTENT)))]
+
+    def get_layers_in_codevulne(self) -> list[str]:
+        return [Path(a).stem for a in glob.glob(str(self.TMP_CODEVULNE / ("*"+ EXTENT)))]
 
     def get_files_in_rasters_code(self) -> list[Path]:
        return [Path(a) for a in glob.glob(str(self.TMP_RASTERS_CODE / "*.tiff"))]
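Note: the new `get_layers_in_gdb()` / `get_layer_types_in_gdb()` helpers index into `pyogrio.list_layers`, which returns an N×2 array of `(layer name, geometry type)` pairs. A sketch of what they slice (the GDB path is hypothetical):

```python
from pyogrio import list_layers

pairs = list_layers("INPUT/DATABASE/original.gdb")  # hypothetical path
names = [a[0] for a in pairs]  # what get_layers_in_gdb() returns
types = [a[1] for a in pairs]  # what get_layer_types_in_gdb() returns
```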
@@ -343,23 +418,114 @@ class Accept_Manager():
             return cursim
 
         return None
+
+    def get_types_in_file(self, file:str) -> list[str]:
+        """ Get the types of the geometries in the Shape file """
+
+        return [a[1] for a in list_layers(str(file))]
+
+    def is_type_unique(self, file:str) -> bool:
+        """ Check if the file contains only one type of geometry """
+
+        types = self.get_types_in_file(file)
+        return len(types) == 1
+
+    def is_polygons(self, set2test:set) -> bool:
+        """ Check if the set contains only polygons """
+
+        set2test = list(set2test)
+        firstone = set2test[0]
+        if 'Polygon' in firstone:
+            for curtype in set2test:
+                if 'Polygon' not in curtype:
+                    return False
+            return True
+        else:
+            return False
+
+    def is_same_types(self, file:str) -> tuple[bool, str]:
+        """ Check if the file contains only the same type of geometry """
+
+        types = self.get_types_in_file(file)
+
+        if len(types) == 1:
+            if 'Point' in types[0]:
+                return True, 'Point'
+            elif 'Polygon' in types[0]:
+                return True, 'Polygon'
+            elif 'LineString' in types[0]:
+                return True, 'LineString'
+            else:
+                raise ValueError(f"The type of geometry {types[0]} is not recognized")
+        else:
+            firstone = types[0]
+            if 'Point' in firstone:
+                for curtype in types:
+                    if 'Point' not in curtype:
+                        return False, None
+                return True, 'Point'
+
+            elif 'Polygon' in firstone:
+                for curtype in types:
+                    if 'Polygon' not in curtype:
+                        return False, None
+
+                return True, 'Polygon'
+
+            elif 'LineString' in firstone:
+                for curtype in types:
+                    if 'LineString' not in curtype:
+                        return False, None
+
+                return True, 'LineString'
+            else:
+                raise ValueError(f"The type of geometry {firstone} is not recognized")
+
 
     def get_return_periods(self) -> list[int]:
+        """
+        Get the return periods from the simulations
+
+        :return list[int]: the **sorted list** of return periods
+        """
 
+        # List files in directory
         sims = self.get_sims_files_for_scenario()
 
         if len(sims)==0:
             logging.error("No simulations found")
             return None
 
+        # Two cases:
+        # - Return periods are named as T2.tif, T5.tif, T10.tif, ...
+        # - Return periods are named as *_T2_h.tif, *_T5_h.tif, *_T10_h.tif, ...
         if "_h.tif" in sims[0].name:
-
-
-
+
+            # Searching for the position of the return period in the name
+            idx_T = [cursim.name.find("_T") for cursim in sims]
+            idx_h = [cursim.name.find("_h.tif") for cursim in sims]
+
+            assert len(idx_T) == len(idx_h), "The number of T and h are not the same"
+            for curT, curh in zip(idx_T, idx_h):
+                assert curT != -1, "The T is not found"
+                assert curh != -1, "The h is not found"
+                assert curh > curT, "The h is before the T"
+
+            # Create the list of return periods -- only the numeric part
+            sims = [int(cursim.name[idx_T[i]+2:idx_h[i]]) for i, cursim in enumerate(sims)]
         else:
-
-
-
+            # searching for the position of the return period in the name
+            idx_T = [cursim.name.find("T") for cursim in sims]
+            idx_h = [cursim.name.find(".tif") for cursim in sims]
+
+            assert len(idx_T) == len(idx_h), "The number of T and h are not the same"
+            for curT, curh in zip(idx_T, idx_h):
+                assert curT != -1, "The T is not found"
+                assert curh != -1, "The h is not found"
+                assert curh > curT, "The h is before the T"
+
+            # create the list of return periods -- only the numeric part
+            sims = [int(cursim.name[idx_T[i]+1:idx_h[i]]) for i, cursim in enumerate(sims)]
 
         return sorted(sims)
 
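Note: both filename conventions handled by `get_return_periods()` reduce to slicing the digits between the `T` marker and the file suffix. A standalone sketch of the `*_T<n>_h.tif` branch, on made-up file names:

```python
names = ["sim_T2_h.tif", "sim_T25_h.tif", "sim_T100_h.tif"]  # hypothetical files

idx_T = [n.find("_T") for n in names]
idx_h = [n.find("_h.tif") for n in names]

# Keep only the numeric part between "_T" and "_h.tif", then sort
periods = sorted(int(n[idx_T[i] + 2:idx_h[i]]) for i, n in enumerate(names))
print(periods)  # [2, 25, 100]
```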
@@ -376,8 +542,12 @@ class Accept_Manager():
 
         pond.append(1./float(rt[0]) + (1./float(rt[0]) - 1./float(rt[1]))/2.)
         for i in range(1, len(rt)-1):
+            # Full formula
             # pond.append((1./float(rt[i-1]) - 1./float(rt[i]))/2. + (1./float(rt[i]) - 1./float(rt[i+1]))/2.)
+
+            # More compact formula
             pond.append((1./float(rt[i-1]) - 1./float(rt[i+1]))/2.)
+
         pond.append(1./float(rt[-1]) + (1./float(rt[-2]) - 1./float(rt[-1]))/2.)
 
         return pd.DataFrame(pond, columns=["Ponderation"], index=rt)
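Note: the "compact formula" kept in this hunk is algebraically identical to the commented "full formula" — the two ±1/rt[i] terms cancel. A quick numerical check (the return periods are made up):

```python
rt = [2, 5, 25, 50, 100]  # hypothetical sorted return periods

for i in range(1, len(rt) - 1):
    full = (1./rt[i-1] - 1./rt[i])/2. + (1./rt[i] - 1./rt[i+1])/2.
    compact = (1./rt[i-1] - 1./rt[i+1])/2.
    assert abs(full - compact) < 1e-15
```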
@@ -419,6 +589,10 @@ class Accept_Manager():
             logging.error("INPUT : The water depth directory does not exist")
             err = True
 
+        if not self.IN_EPU_STATIONS.exists():
+            logging.error("INPUT : The EPU stations directory does not exist")
+            err = True
+
         if self.Study_area is not None:
             if not self.SA.exists():
                 logging.error("INPUT : The study area file does not exist")
@@ -441,7 +615,7 @@ class Accept_Manager():
             err = True
 
         if self.scenario is None:
-            logging.
+            logging.debug("The scenario has not been defined")
         else:
             if not self.IN_SCEN_DIR.exists():
                 logging.error("The scenario directory does not exist")
@@ -457,11 +631,20 @@ class Accept_Manager():
         """
 
         self.TMP_DIR.mkdir(parents=True, exist_ok=True)
-        self.TMP_DATABASE.mkdir(parents=True, exist_ok=True)
 
         if self.Study_area is not None:
             self.TMP_STUDYAREA.mkdir(parents=True, exist_ok=True)
+            self.TMP_DATABASE.mkdir(parents=True, exist_ok=True)
+            self.TMP_CLIPGDB.mkdir(parents=True, exist_ok=True)
+            self.TMP_CADASTER.mkdir(parents=True, exist_ok=True)
+            self.TMP_WMODIF.mkdir(parents=True, exist_ok=True)
+            self.TMP_CODEVULNE.mkdir(parents=True, exist_ok=True)
+            self.TMP_PICC.mkdir(parents=True, exist_ok=True)
+            self.TMP_IGNCE.mkdir(parents=True, exist_ok=True)
             self.TMP_VULN_DIR.mkdir(parents=True, exist_ok=True)
+            self.TMP_RASTERS.mkdir(parents=True, exist_ok=True)
+            self.TMP_RASTERS_CODE.mkdir(parents=True, exist_ok=True)
+            self.TMP_RASTERS_VULNE.mkdir(parents=True, exist_ok=True)
 
         if self.scenario is not None:
             self.TMP_SCEN_DIR.mkdir(parents=True, exist_ok=True)
@@ -533,12 +716,12 @@ class Accept_Manager():
 
     def check_before_rasterize(self) -> bool:
 
-        if not self.
+        if not self.TMP_CODEVULNE.exists():
             logging.error("The final database with vulnerability levels does not exist")
             return False
 
-        if not self.
-            logging.error("The
+        if not self.TMP_WMODIF.exists():
+            logging.error("The vector data with modifications does not exist")
             return False
 
         return True
@@ -587,44 +770,145 @@ class Accept_Manager():
 
         return True
 
-
+    def compare_original_clipped_layers(self) -> str:
+        """ Compare the original layers with the clipped ones """
+
+        layers = self.get_layers_in_gdb()
+        layers_clip = self.get_layers_in_clipgdb()
+
+        ret = 'These layers have not been clipped:\n'
+        for layer in layers:
+            if layer not in layers_clip:
+                ret += " - {}\n".format(layer)
+
+        ret += '\nThese layers have been clipped but are not present in the GDB:\n'
+        for layer in layers_clip:
+            if layer not in layers:
+                ret += " - {}\n".format(layer)
+
+        ret+='\n'
+
+        return ret
+
+    def compare_clipped_raster_layers(self) -> str:
+        """ Compare the clipped layers with the rasterized ones """
+
+        layers = self.get_layers_in_clipgdb()
+        layers_rast = self.get_layers_in_codevulne()
+
+        ret = 'These layers {} have not been rasterized:\n'
+        for layer in layers:
+            if layer not in layers_rast:
+                ret += " - {}\n".format(layer)
+
+        ret += '\nThese layers have been rasterized but are not in the orginal GDB:\n'
+        for layer in layers_rast:
+            if layer not in layers:
+                ret += " - {}\n".format(layer)
+
+        ret+='\n'
+
+        return ret
+
+    def get_operand(self, file:str) -> Modif_Type:
+        """ Get the operand based on the layer name """
+        LAYERS_WALOUS = ["WALOUS_2018_LB72_112",
+                         "WALOUS_2018_LB72_31",
+                         "WALOUS_2018_LB72_32",
+                         "WALOUS_2018_LB72_331",
+                         "WALOUS_2018_LB72_332",
+                         "WALOUS_2018_LB72_333",
+                         "WALOUS_2018_LB72_34"]
+
+        ret, curtype = self.is_same_types(file)
+        layer = Path(file).stem
+
+        if not ret:
+            raise ValueError("The layer contains different types of geometries")
+
+        if layer in LAYERS_WALOUS:
+            return Modif_Type.WALOUS
+
+        elif curtype=="Point":
+
+            self.points2polys.append(layer)
+
+            if layer =="BDREF_DGO3_PASH__SCHEMA_STATIONS_EPU":
+                return Modif_Type.POINT2POLY_EPURATION
+            elif layer =="INFRASIG_SOINS_SANTE__ETAB_AINES":
+                return Modif_Type.POINT2POLY_PICC
+            else:
+                return Modif_Type.POINT2POLY_CAPAPICC
+
+        elif layer =="Hab_2018_CABU":
+            return Modif_Type.INHABITED
+
+        elif layer =="INFRASIG_ROUTE_RES_ROUTIER_TE_AXES":
+
+            self.lines2polys.append(layer)
+
+            return Modif_Type.ROAD
+
+        else:
+            return Modif_Type.COPY
+
 
-def
+def clip_layer(layer:str,
               file_path:str,
               Study_Area:str,
-
+              output_dir:str):
     """
-    Clip the input data based on the selected bassin and saves it
+    Clip the input data based on the selected bassin and saves it
+    in separate shape files.
+
+    As shape file doen not support DateTime, the columns with DateTime
+    are converted to string.
 
     :param layer: the layer name in the GDB file
     :param file_path: the path to the GDB file
     :param Study_Area: the path to the study area shapefile
-    :param
+    :param output_dir: the path to the output directory
     """
 
     layer = str(layer)
     file_path = str(file_path)
     Study_Area = str(Study_Area)
-
+    output_dir = Path(output_dir)
 
-    St_Area = gpd.read_file(Study_Area)
+    St_Area = gpd.read_file(Study_Area, engine=ENGINE)
 
     logging.info(layer)
 
     # The data is clipped during the reading
     # **It is more efficient than reading the entire data and then clipping it**
-
+    #
+    # FIXME: "read_dataframe" is used directly rather than "gpd.read_file" cause
+    # the "layer" parameter is well transmitted to the "read_dataframe" function...
+    df:gpd.GeoDataFrame = read_dataframe(file_path, layer=layer, mask=St_Area['geometry'][0])
+
+    if len(df) == 0:
+        logging.warning("No data found for layer " + str(layer))
+        return "No data found for layer " + str(layer)
 
     # Force Lambert72 -> EPSG:31370
     df.to_crs("EPSG:31370", inplace=True)
-
-
-
-
-
-
-
-
+    try:
+        date_columns = df.select_dtypes(include=['datetimetz']).columns.tolist()
+        if len(date_columns)>0:
+            df[date_columns] = df[date_columns].astype(str)
+
+        df.to_file(str(output_dir / (layer+EXTENT)), mode='w', engine=ENGINE)
+    except Exception as e:
+        logging.error("Error while saving the clipped " + str(layer) + " to file")
+        logging.error(e)
+        pass
+
+    logging.info("Saved the clipped " + str(layer) + " to file")
+    return "Saved the clipped " +str(layer)+ " to file"
+
+
+def data_modification(layer:str,
+                      manager:Accept_Manager,
                       picc:gpd.GeoDataFrame,
                       capa:gpd.GeoDataFrame ):
     """
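Note: the rewritten `clip_layer()` leans on pyogrio's ability to filter while reading — passing `mask=` to `read_dataframe` clips the layer during the read instead of loading everything and clipping afterwards. A reduced sketch of that pattern (the paths and the layer name are hypothetical):

```python
import geopandas as gpd
from pyogrio import read_dataframe

study_area = gpd.read_file("INPUT/STUDY_AREA/study_area.shp")  # hypothetical file

df = read_dataframe("INPUT/DATABASE/original.gdb",             # hypothetical GDB
                    layer="SOME_LAYER",
                    mask=study_area["geometry"][0])            # clip while reading

df.to_crs("EPSG:31370", inplace=True)  # force Lambert 72, as in the diff
```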
@@ -642,93 +926,154 @@ def data_modification(input_database:str,
     df1:gpd.GeoDataFrame
     df2:gpd.GeoDataFrame
 
-    LAYERS_WALOUS = ["WALOUS_2018_LB72_112",
-                     "WALOUS_2018_LB72_31",
-                     "WALOUS_2018_LB72_32",
-                     "WALOUS_2018_LB72_331",
-                     "WALOUS_2018_LB72_332",
-                     "WALOUS_2018_LB72_333",
-                     "WALOUS_2018_LB72_34"]
-
-    input_database = str(input_database)
     layer = str(layer)
-    […the remaining lines of the old implementation were not preserved in the source diff view…]
+
+    dir_input = manager.TMP_CLIPGDB
+    dir_output = manager.TMP_WMODIF
+
+    input_file = str(dir_input / (layer + EXTENT))
+    output_file = str(dir_output / (layer + EXTENT))
+
+    # Read the data
+    df:gpd.GeoDataFrame = gpd.read_file(input_file, engine=ENGINE)
+    nblines, _ = df.shape
+
+    if nblines>0:
+        op = manager.get_operand(input_file)
+
+        if op == Modif_Type.WALOUS:
+            # Walous layers changed to PICC buidings
+
+            assert picc.crs == df.crs, "CRS of PICC and input data do not match"
+
+            assert "GEOREF_ID" in picc.columns, "The PICC file does not contain the GEOREF_ID column"
+            assert "NATUR_CODE" in picc.columns, "The PICC file does not contain the NATUR_CODE column"
+
+            df1 = gpd.sjoin(picc, df, how="inner", predicate="intersects" )
+            cols = df.columns
+
+            cols = np.append(cols, "GEOREF_ID")
+            cols = np.append(cols, "NATUR_CODE")
+
+            df1 = df1[cols]
+
+            if df1.shape[0] > 0:
+                assert manager.is_polygons(set(df1.geom_type)), f"The layer does not contains polygons - {op}"
+                df1.to_file(output_file, engine=ENGINE)
+            else:
+                logging.warning("No data found for layer " + str(layer))
+
+        elif op == Modif_Type.POINT2POLY_EPURATION:
+            # Change BDREF based on AJOUT_PDET sent by Perrine (SPI)
+
+            # The original layer is a point layer.
+            # The EPU_STATIONS shape file (from SPI) is a polygon layer.
+
+            df1 = gpd.read_file(str(manager.EPU_STATIONS), engine=ENGINE)
+
+            assert df1.crs == df.crs, "CRS of AJOUT_PDET and input data do not match"
+
+            df2 = gpd.sjoin(picc, df1, how="inner", predicate="intersects" )
+
+            if df2.shape[0] > 0:
+                assert manager.is_polygons(set(df2.geom_type)), f"The layer does not contains polygons - {op}"
+                df2.to_file(output_file, engine=ENGINE)
+            else:
+                logging.warning("No data found for layer " + str(layer))
+
+        elif op == Modif_Type.POINT2POLY_PICC:
+            # Select the polygons that contains the points
+            # in theCadaster and PICC files
+
+            assert capa.crs == df.crs, "CRS of CaPa and input data do not match"
+            assert "CaPaKey" in capa.columns, "The CaPa file does not contain the CaPaKey column"
+
+            df1= gpd.sjoin(capa, df, how="inner", predicate="intersects" )
+            cols=df.columns
+
+            cols = np.append(cols, "CaPaKey")
+            df1=df1[cols]
+            df2=gpd.sjoin(picc, df1, how="inner", predicate="intersects" )
+
+            if df2.shape[0] > 0:
+                assert manager.is_polygons(set(df2.geom_type)), f"The layer does not contains polygons - {op}"
+                df2.to_file(output_file, engine=ENGINE)
+            else:
+                logging.warning("No data found for layer " + str(layer))
+
+        elif op == Modif_Type.POINT2POLY_CAPAPICC:
+
+            # Select the polygons that contains the points
+            # in theCadaster and PICC files
+
+            assert capa.crs == df.crs, "CRS of CaPa and input data do not match"
+            assert picc.crs == df.crs, "CRS of PICC and input data do not match"
+
+            # Join the Layer and CaPa DataFrames : https://geopandas.org/en/stable/docs/reference/api/geopandas.sjoin.html
+            # ‘inner’: use intersection of keys from both dfs; retain only left_df geometry column
+            # "intersects" : Binary predicate. Valid values are determined by the spatial index used.
+            df1= gpd.sjoin(capa, df, how="inner", predicate="intersects" )
+
+            # Retain only the columns of the input data
+            cols = df.columns
+            # but add the CaPaKey
+            cols = np.append(cols, "CaPaKey")
+
+            df1 = df1[cols]
+
+            # Join the df1 and PICC DataFrames : https://geopandas.org/en/stable/docs/reference/api/geopandas.sjoin.html
+            df2 = gpd.sjoin(picc, df1, how="inner", predicate="intersects" )
+
+            # Add only the GEOREF_ID and NATUR_CODE columns from PICC
+            cols = np.append(cols, "GEOREF_ID")
+            cols = np.append(cols, "NATUR_CODE")
+
+            df2 = df2[cols]
+
+            if df2.shape[0] > 0:
+                assert manager.is_polygons(set(df2.geom_type)), f"The layer does not contains polygons - {op}"
+                df2.to_file(output_file, engine=ENGINE)
+            else:
+                logging.warning("No data found for layer " + str(layer))
+
+        elif op == Modif_Type.INHABITED:
+            # Select only the buildings with a number of inhabitants > 0
+            df1=df[df["NbsHabTOT"]>0]
+
+            if df1.shape[0] > 0:
+                assert manager.is_polygons(set(df1.geom_type)), f"The layer does not contains polygons - {op}"
+                df1.to_file(output_file, engine=ENGINE)
+            else:
+                logging.warning("No data found for layer " + str(layer))
+
+        elif op == Modif_Type.ROAD:
+            # Create a buffer around the roads
+            df1=df.buffer(distance=6, cap_style=2)
+
+            if df1.shape[0] > 0:
+                assert set(df1.geom_type) == {'Polygon'}, f"The layer does not contains polygons - {op}"
+                df1.to_file(output_file, engine=ENGINE)
+            else:
+                logging.warning("No data found for layer " + str(layer))
+
+        elif op == Modif_Type.COPY:
+            # just copy the data if it is polygons
+            if manager.is_polygons(set(df.geom_type)):
+                df.to_file(output_file, engine=ENGINE)
+            else:
+                logging.error("The layer does not contains polygons - " + str(layer))
         else:
-
+            raise ValueError(f"The operand {op} is not recognized")
+
+        return "Data modification done for " + str(layer)
     else:
-
+        # Normally, phase 1 does not create empty files
+        # But it is better to check... ;-)
+        logging.error("skipped" + str(layer) + "due to no polygon in the study area")
+        return "skipped" + str(layer) + "due to no polygon in the study area"
 
 def vector_to_raster(layer:str,
-
-                     extent:Path,
+                     manager:Accept_Manager,
                      attribute:str,
                      pixel_size:float):
     """
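Note: the point-to-polygon branches above share one spatial-join pattern; the `POINT2POLY_CAPAPICC` branch chains two joins — cadastre parcels that intersect the points, then PICC buildings that intersect those parcels. A reduced sketch, where `points`, `capa` and `picc` stand for GeoDataFrames assumed to be loaded elsewhere in a common CRS:

```python
import geopandas as gpd
import numpy as np

# 1) parcels that intersect the point layer (keeps capa's geometry)
df1 = gpd.sjoin(capa, points, how="inner", predicate="intersects")
cols = np.append(points.columns, "CaPaKey")  # original columns + parcel key
df1 = df1[cols]

# 2) PICC buildings that intersect those parcels, plus two PICC fields
df2 = gpd.sjoin(picc, df1, how="inner", predicate="intersects")
df2 = df2[np.append(cols, ["GEOREF_ID", "NATUR_CODE"])]
```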
@@ -745,24 +1090,22 @@ def vector_to_raster(layer:str,
     old_dir = os.getcwd()
 
     layer = str(layer)
-
-
+
+    vector_input = str(manager.TMP_CODEVULNE / (layer + EXTENT))
+    extent = str(manager.SA)
     attribute = str(attribute)
     pixel_size = float(pixel_size)
 
-
-    OUT_NAME = layer + ".tiff"
+    out_file = manager.TMP_RASTERS / attribute / (layer + ".tiff")
 
-
+    if out_file.exists():
+        os.remove(out_file)
 
-
-    os.remove(OUT_DIR/OUT_NAME)
-
-    os.chdir(OUT_DIR)
+    out_file = str(out_file)
 
     NoData_value = 0
 
-    extent_ds:ogr.DataSource = ogr.Open(
+    extent_ds:ogr.DataSource = ogr.Open(extent)
     extent_layer = extent_ds.GetLayer()
 
     x_min, x_max, y_min, y_max = extent_layer.GetExtent()
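Note: for context, the target raster shape used below follows directly from the study-area extent and the pixel size; the hunk after this one shows the maxima snapped outward with `np.ceil`, and flooring the minima is presumably the matching step. A sketch with made-up Lambert 72 bounds:

```python
import numpy as np

x_min, x_max = 250_000.3, 260_000.7  # hypothetical extent (EPSG:31370, metres)
y_min, y_max = 140_000.2, 150_000.9
pixel_size = 1.0

x_min, y_min = float(np.floor(x_min)), float(np.floor(y_min))  # assumed counterpart to ceil
x_max, y_max = float(np.ceil(x_max)), float(np.ceil(y_max))

x_res = int((x_max - x_min) / pixel_size)  # 10001 columns
y_res = int((y_max - y_min) / pixel_size)  # 10001 rows
```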
@@ -773,13 +1116,16 @@ def vector_to_raster(layer:str,
     y_max = float(np.ceil(y_max))
 
     # Open the data sources and read the extents
-    source_ds:ogr.DataSource = ogr.Open(
-
+    source_ds:ogr.DataSource = ogr.Open(vector_input)
+    if source_ds is None:
+        logging.error(f"Could not open the data source {layer}")
+        return
+
     source_layer = source_ds.GetLayer()
 
     # Create the destination data source
     x_res = int((x_max - x_min) / pixel_size)
     y_res = int((y_max - y_min) / pixel_size)
-    target_ds:gdal.Driver = gdal.GetDriverByName('GTiff').Create(
+    target_ds:gdal.Driver = gdal.GetDriverByName('GTiff').Create(out_file,
                                                                  x_res, y_res, 1,
                                                                  gdal.GDT_Byte,
                                                                  options=["COMPRESS=LZW"])
@@ -793,12 +1139,13 @@ def vector_to_raster(layer:str,
     band.SetNoDataValue(NoData_value)
 
     # Rasterize the areas
-    gdal.RasterizeLayer(target_ds, [1],
+    gdal.RasterizeLayer(target_ds, [1],
+                        source_layer,
+                        options=["ATTRIBUTE="+attribute,
+                                 "ALL_TOUCHED=TRUE"])
     target_ds = None
 
-
-
-def Comp_Vulnerability(dirsnames:Accept_Manager):
+def compute_vulnerability(manager:Accept_Manager):
     """
     Compute the vulnerability for the Study Area
 
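Note: the restored `gdal.RasterizeLayer` call burns a feature attribute into the band rather than a constant. A minimal self-contained sketch of that mode (the file names, geotransform origin and the `Vulne` field are hypothetical):

```python
from osgeo import gdal, ogr

src = ogr.Open("layer.gpkg")  # hypothetical vector source
layer = src.GetLayer()

target = gdal.GetDriverByName('GTiff').Create("out.tiff", 1000, 1000, 1,
                                              gdal.GDT_Byte, options=["COMPRESS=LZW"])
target.SetGeoTransform((250_000., 1., 0., 150_000., 0., -1.))  # hypothetical origin

band = target.GetRasterBand(1)
band.SetNoDataValue(0)

# Each feature is burned with the value of its "Vulne" field
gdal.RasterizeLayer(target, [1], layer,
                    options=["ATTRIBUTE=Vulne", "ALL_TOUCHED=TRUE"])
target = None  # flush to disk
```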
@@ -807,41 +1154,53 @@ def Comp_Vulnerability(dirsnames:Accept_Manager):
     :param dirsnames: the Dirs_Names object from the calling function
     """
 
-
-
+    vuln_csv = Vulnerability_csv(manager.VULNERABILITY_CSV)
+
+    rasters_vuln = manager.get_files_in_rasters_vulne()
 
     logging.info("Number of files",len(rasters_vuln))
 
     ds:gdal.Dataset = gdal.Open(str(rasters_vuln[0]))
-    ds1:gdal.Dataset = gdal.Open(str(rasters_code[0]))
 
     tmp_vuln = np.array(ds.GetRasterBand(1).ReadAsArray())
-    tmp_code = np.array(ds1.GetRasterBand(1).ReadAsArray())
 
     x, y = tmp_vuln.shape
 
     logging.info("Computing Vulnerability")
 
-    array_vuln = np.
-    array_code = np.
-
-
+    array_vuln = np.ones((x, y), dtype=np.int8)
+    array_code = np.ones((x, y), dtype=np.int8)
+
+    # Create a JIT function to update the arrays
+    # Faster than the classical Python loop or Numpy
+    @nb.jit(nopython=True, boundscheck=False, inline='always')
+    # @cuda.jit(device=True, inline=True)
+    def update_arrays_jit(tmp_vuln, loccode, array_vuln, array_code):
+        for i in range(tmp_vuln.shape[0]):
+            for j in range(tmp_vuln.shape[1]):
+                if tmp_vuln[i, j] >= array_vuln[i, j]:
+                    array_vuln[i, j] = tmp_vuln[i, j]
+                    array_code[i, j] = loccode
+
+        return array_vuln, array_code
+
+    for i in tqdm(range(len(rasters_vuln)), 'Computing Vulnerability : '):
         logging.info("Computing layer {} / {}".format(i, len(rasters_vuln)))
         ds = gdal.Open(str(rasters_vuln[i]))
-        ds1 = gdal.Open(str(rasters_code[i]))
 
         tmp_vuln = ds.GetRasterBand(1).ReadAsArray()
-        tmp_code = ds1.GetRasterBand(1).ReadAsArray()
 
-
-
-
+        loccode = vuln_csv.get_vulnerability_code(rasters_vuln[i].stem)
+
+        # We use the jit
+        update_arrays_jit(tmp_vuln, loccode, array_vuln, array_code)
 
-    ij = np.where(array_vuln == 0)
-    array_vuln[ij] = 1
-    array_code[ij] = 1
+    # ij = np.where(array_vuln == 0)
+    # array_vuln[ij] = 1
+    # array_code[ij] = 1
 
-
+    logging.info("Saving the computed vulnerability")
+    dst_filename= str(manager.SA_VULN)
     y_pixels, x_pixels = array_vuln.shape # number of pixels in x
 
     driver = gdal.GetDriverByName('GTiff')
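Note: `update_arrays_jit()` keeps, per pixel, the highest vulnerability seen so far and the code of the layer that set it. A self-contained NumPy equivalent is sketched below for reference; the diff's own comment states the JIT double loop is faster, presumably because it avoids the temporary boolean mask:

```python
import numpy as np

array_vuln = np.ones((3, 3), dtype=np.int8)
array_code = np.ones((3, 3), dtype=np.int8)
tmp_vuln = np.array([[0, 2, 4], [1, 1, 1], [3, 0, 2]], dtype=np.int8)
loccode = 7  # hypothetical layer code

# Same update rule as the JIT loop, vectorized
mask = tmp_vuln >= array_vuln
array_vuln[mask] = tmp_vuln[mask]
array_code[mask] = loccode
```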
@@ -855,8 +1214,8 @@ def Comp_Vulnerability(dirsnames:Accept_Manager):
     dataset.FlushCache()
     dataset = None
 
-
-    dst_filename= str(
+    logging.info("Saving the computed codes")
+    dst_filename= str(manager.SA_CODE)
     y_pixels, x_pixels = array_code.shape # number of pixels in x
     driver = gdal.GetDriverByName('GTiff')
     dataset = driver.Create(dst_filename, x_pixels, y_pixels, gdal.GDT_Byte, 1, options=["COMPRESS=LZW"])
@@ -871,7 +1230,7 @@ def Comp_Vulnerability(dirsnames:Accept_Manager):
 
     logging.info("Computed Vulnerability for the Study Area - Done")
 
-def
+def compute_vulnerability4scenario(manager:Accept_Manager):
     """ Compute the vulnerability for the scenario
 
     This function **will modify** the data by the removed buildings/scenarios.
@@ -881,16 +1240,16 @@ def Comp_Vulnerability_Scen(dirsnames:Accept_Manager):
     :param dirsnames: the Dirs_Names object from the calling function
     """
 
-    array_vuln = gdal.Open(str(
+    array_vuln = gdal.Open(str(manager.SA_VULN))
     geotrans = array_vuln.GetGeoTransform() # get GeoTranform from existed 'data0'
     proj = array_vuln.GetProjection() # you can get from a exsited tif or import
 
     array_vuln = np.array(array_vuln.GetRasterBand(1).ReadAsArray())
 
-    array_code = gdal.Open(str(
+    array_code = gdal.Open(str(manager.SA_CODE))
     array_code = np.array(array_code.GetRasterBand(1).ReadAsArray())
 
-    Rbu =
+    Rbu = manager.get_files_in_rm_buildings()
 
     if len(Rbu)>0:
         for curfile in Rbu:
@@ -901,7 +1260,7 @@ def Comp_Vulnerability_Scen(dirsnames:Accept_Manager):
             array_vuln[ij] = 1
             array_code[ij] = 1
 
-    dst_filename= str(
+    dst_filename= str(manager.TMP_VULN)
     y_pixels, x_pixels = array_vuln.shape # number of pixels in x
 
     driver = gdal.GetDriverByName('GTiff')
@@ -914,7 +1273,7 @@ def Comp_Vulnerability_Scen(dirsnames:Accept_Manager):
     dataset = None
 
 
-    dst_filename= str(
+    dst_filename= str(manager.TMP_CODE)
     y_pixels, x_pixels = array_code.shape # number of pixels in x
     driver = gdal.GetDriverByName('GTiff')
     dataset = driver.Create(dst_filename, x_pixels, y_pixels, gdal.GDT_Byte, 1, options=["COMPRESS=LZW"])
@@ -927,7 +1286,7 @@ def Comp_Vulnerability_Scen(dirsnames:Accept_Manager):
 
     logging.info("Computed Vulnerability for the scenario")
 
-def
+def match_vulnerability2sim(inRas:Path, outRas:Path, MODREC:Path):
     """
     Clip the raster to the MODREC/simulation extent
 
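Note: the renamed `match_vulnerability2sim()` crops the vulnerability raster to the simulation's bounding box with `gdal.Translate`. The idiom, reduced to its essentials (paths are hypothetical):

```python
from osgeo import gdal

ref = gdal.Open("simulation_T25_h.tif")  # hypothetical MODREC raster
gt = ref.GetGeoTransform()
minx, maxy = gt[0], gt[3]
maxx = gt[0] + gt[1] * ref.RasterXSize
miny = gt[3] + gt[5] * ref.RasterYSize   # gt[5] is negative

ds = gdal.Open("Vulnerability.tiff")     # hypothetical input raster
ds = gdal.Translate("Vulnerability_clip.tiff", ds,
                    projWin=[minx, maxy, maxx, miny])
ds = None  # flush
```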
@@ -951,7 +1310,7 @@ def match_vuln_modrec(inRas:Path, outRas:Path, MODREC:Path):
     ds = gdal.Translate(outRas, ds, projWin = [minx, maxy, maxx, miny])
     ds = None
 
-def
+def compute_acceptability(manager:Accept_Manager,
                           model_h:np.ndarray,
                           vulnerability:np.ndarray,
                           interval:int,
@@ -973,7 +1332,7 @@ def VulMod(dirsnames:Accept_Manager,
 
     logging.info(interval)
 
-    Qfile = pd.read_csv(
+    Qfile = pd.read_csv(manager.POINTS_CSV)
 
     Qfile = Qfile[Qfile["Interval"]==interval]
     Qfile = Qfile.reset_index()
@@ -1003,7 +1362,7 @@ def VulMod(dirsnames:Accept_Manager,
     accept[ij[0][loc_ij], ij[1][loc_ij]] = loc_accept[idx]
 
     #save raster
-    dst_filename = str(
+    dst_filename = str(manager.TMP_QFILES / "Q{}.tif".format(interval))
 
     y_pixels, x_pixels = accept.shape # number of pixels in x
     driver = gdal.GetDriverByName('GTiff')
@@ -1018,7 +1377,12 @@ def VulMod(dirsnames:Accept_Manager,
|
|
1018
1377
|
|
1019
1378
|
def shp_to_raster(vector_fn:str, raster_fn:str, pixel_size:float = 1.):
|
1020
1379
|
"""
|
1021
|
-
Convert a vector layer to a raster tiff file
|
1380
|
+
Convert a vector layer to a raster tiff file.
|
1381
|
+
|
1382
|
+
The raster will contain only 2 values : 0 and 1
|
1383
|
+
|
1384
|
+
- 1 : the inside of the vector layer
|
1385
|
+
- 0 : the rest == NoData/NullValue
|
1022
1386
|
|
1023
1387
|
:param vector_fn: the path to the vector file
|
1024
1388
|
:param raster_fn: the path to the raster file
|
@@ -1029,7 +1393,7 @@ def shp_to_raster(vector_fn:str, raster_fn:str, pixel_size:float = 1.):
     vector_fn = str(vector_fn)
     raster_fn = str(raster_fn)
 
-    NoData_value = np.nan
+    NoData_value = 0 # np.nan is not necessary a good idea
     # Open the data sources and read the extents
     source_ds = ogr.Open(vector_fn)
     source_layer = source_ds.GetLayer()
@@ -1043,7 +1407,8 @@ def shp_to_raster(vector_fn:str, raster_fn:str, pixel_size:float = 1.):
     # Create the destination data source
     x_res = int((x_max - x_min) / pixel_size)
     y_res = int((y_max - y_min) / pixel_size)
-
+
+    target_ds = gdal.GetDriverByName('GTiff').Create(raster_fn, x_res, y_res, 1, gdal.GDT_Byte,
                                                      options=["COMPRESS=LZW"])
 
     target_ds.SetGeoTransform((x_min, pixel_size, 0, y_max, 0, -pixel_size))
@@ -1053,6 +1418,10 @@ def shp_to_raster(vector_fn:str, raster_fn:str, pixel_size:float = 1.):
     band = target_ds.GetRasterBand(1)
     band.SetNoDataValue(NoData_value)
     # Rasterize the areas
-    gdal.RasterizeLayer(target_ds,
+    gdal.RasterizeLayer(target_ds,
+                        bands = [1],
+                        layer = source_layer,
+                        burn_values = [1],
+                        options=["ALL_TOUCHED=TRUE"])
    target_ds = None
    vector_fn = raster_fn = None
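Note: where `vector_to_raster()` burns an attribute, `shp_to_raster()` burns the constant 1 over a 0/NoData background — which is also why `NoData_value = 0` replaces `np.nan` above, since a `GDT_Byte` band cannot hold NaN. A self-contained sketch of the whole mask pipeline (file names are hypothetical):

```python
from osgeo import gdal, ogr

src = ogr.Open("rivers.shp")  # hypothetical vector file
layer = src.GetLayer()
x_min, x_max, y_min, y_max = layer.GetExtent()

pixel_size = 1.0
x_res = int((x_max - x_min) / pixel_size)
y_res = int((y_max - y_min) / pixel_size)

target = gdal.GetDriverByName('GTiff').Create("mask.tiff", x_res, y_res, 1,
                                              gdal.GDT_Byte, options=["COMPRESS=LZW"])
target.SetGeoTransform((x_min, pixel_size, 0, y_max, 0, -pixel_size))

band = target.GetRasterBand(1)
band.SetNoDataValue(0)

# Burn 1 inside the features; everything else stays 0 == NoData
gdal.RasterizeLayer(target, bands=[1], layer=layer,
                    burn_values=[1], options=["ALL_TOUCHED=TRUE"])
target = None  # flush to disk
```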