wolfhece 2.1.23__py3-none-any.whl → 2.1.25__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- wolfhece/acceptability/Parallels.py +81 -0
- wolfhece/acceptability/__init__.py +3 -0
- wolfhece/acceptability/acceptability.py +420 -0
- wolfhece/acceptability/acceptability1.py +211 -0
- wolfhece/acceptability/acceptability_gui.py +318 -0
- wolfhece/acceptability/cli.py +150 -0
- wolfhece/acceptability/func.py +1058 -0
- wolfhece/apps/version.py +1 -1
- wolfhece/cli.py +5 -0
- wolfhece/wolf_array.py +16 -4
- {wolfhece-2.1.23.dist-info → wolfhece-2.1.25.dist-info}/METADATA +1 -1
- {wolfhece-2.1.23.dist-info → wolfhece-2.1.25.dist-info}/RECORD +15 -8
- {wolfhece-2.1.23.dist-info → wolfhece-2.1.25.dist-info}/entry_points.txt +4 -1
- {wolfhece-2.1.23.dist-info → wolfhece-2.1.25.dist-info}/WHEEL +0 -0
- {wolfhece-2.1.23.dist-info → wolfhece-2.1.25.dist-info}/top_level.txt +0 -0
wolfhece/acceptability/func.py (new file)
@@ -0,0 +1,1058 @@
import geopandas as gpd
import pandas as pd
import numpy as np
from osgeo import gdal, ogr, osr, gdalconst
import os
import glob
from pathlib import Path
import logging
from tqdm import tqdm

def get_data_type(fname:Path):
    """ Get the data type of the input file from its extension """

    fname = Path(fname)

    if fname.name.endswith('.gpkg'):
        return 'GPKG'
    elif fname.name.endswith('.shp'):
        return 'ESRI Shapefile'
    elif fname.name.endswith('.gdb'):
        return 'OpenfileGDB'
    else:
        return None

def cleaning_directory(dir:Path):
    """ Remove all files (not subdirectories) from the directory """

    logging.info("Cleaning the directory {}".format(dir))

    files_in_output = list(dir.iterdir())
    for item in files_in_output:
        if item.is_file():
            os.remove(item)

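# --- Editor's note: an illustrative sketch, not part of the package. ---
# get_data_type() maps a file extension to the matching OGR driver name and
# cleaning_directory() removes the files (not the subdirectories) of a folder:
#
#   get_data_type(Path('Cadastre_Walloon.gpkg'))            # -> 'GPKG'
#   cleaning_directory(Path('Data') / 'TEMP' / 'DATABASES' / 'Bassin_Vesdre')
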
class Accept_Manager():
    """
    Structure to store the directories and names of the files.

    In the main directory, the following directories are mandatory/created:
        - INPUT : filled by the user - contains the input data
        - TEMP : created by the script - contains the temporary data for the study area
        - OUTPUT: created by the script - contains the output data for each scenario of the study area

    The INPUT directory contains the following subdirectories:
        - DATABASE: contains the data for the **entire Walloon region**
            - Cadastre_Walloon.gpkg: the Cadastre Walloon file
            - GT_Resilence_dataRisques202010.gdb: the original gdb file from SPW - GT Resilience
            - PICC-vDIFF.gdb: the PICC Walloon file
            - CE_IGN_TOP10V: the IGN top10v shapefile
            - EPU_STATIONS_NEW:
                - AJOUT_PDET_EPU_DG03_STATIONS.shp: the EPU stations shapefile
        - STUDY_AREA: contains the study area shapefiles - one for each study area - e.g. Bassin_Vesdre.shp
        - CSVs: contains the CSV files
            - Intermediate.csv: contains the matrices data for the acceptability computation
            # - Ponderation.csv: contains the ponderation data for the acceptability computation
            - Vulnerability.csv: contains the mapping between layers and vulnerability levels - a code value is also provided
        - WATER_DEPTH: contains the water depth data for each scenario
            - Study_area1:
                - Scenario1
                - Scenario2
                - ...
                - ScenarioN
            - Study_area2:
                - Scenario1
                - Scenario2
                - ...
                - ScenarioN
            - ...
            - Study_areaN:
                - Scenario1
                - Scenario2
                - ...
                - ScenarioN

    The TEMP directory contains the following subdirectories:
        - DATABASES: contains the temporary data for each study area
            - Study_area1:
                - database.gpkg: the clipped database
                - CaPa.gpkg: the clipped Cadastre Walloon file
                - PICC.gpkg: the clipped PICC Walloon file
                - database_final.gpkg: the final database
                - database_final_V.gpkg: the final database with vulnerability levels
                - CE_IGN_TOP10V.tiff: the IGN top10v raster file
                - Masked_River_extent.tiff: the river extent raster file from IGN
                - VULNERABILITY: the vulnerability data
                    - RASTERS:
                        - Code : one file for each layer
                        - Vulne : one file for each layer
                    - Scenario1:

    """

    def __init__(self,
                 main_dir:str = 'Data',
                 Study_area:str = 'Bassin_Vesdre.shp',
                 scenario = None,
                 Original_gdb:str = 'GT_Resilence_dataRisques202010.gdb',
                 CaPa_Walloon:str = 'Cadastre_Walloon.gpkg',
                 PICC_Walloon:str = 'PICC_vDIFF.gdb',
                 CE_IGN_top10v:str = 'CE_IGN_TOP10V/CE_IGN_TOP10V.shp',
                 ) -> None:

        self.old_dir:Path = Path(os.getcwd())

        self.main_dir:Path = Path(main_dir)

        # If the path is relative, make it absolute with respect to the current directory
        if not self.main_dir.is_absolute():
            self.main_dir = Path(os.getcwd()) / self.main_dir

        self._study_area = Study_area
        if Study_area is not None:
            if not self._study_area.endswith('.shp'):
                self._study_area += '.shp'

        self._scenario = scenario
        self._original_gdb = Original_gdb
        self._capa_walloon = CaPa_Walloon
        self._picc_walloon = PICC_Walloon
        self._ce_ign_top10v = CE_IGN_top10v

        self.IN_DIR = self.main_dir / "INPUT"
        self.IN_DATABASE = self.IN_DIR / "DATABASE"
        self.IN_STUDY_AREA = self.IN_DIR / "STUDY_AREA"
        self.IN_CSV = self.IN_DIR / "CSVs"
        self.IN_WATER_DEPTH = self.IN_DIR / "WATER_DEPTH"

        self.ORIGINAL_GDB = self.IN_DATABASE / self._original_gdb
        self.CAPA_WALLOON = self.IN_DATABASE / self._capa_walloon
        self.PICC_WALLOON = self.IN_DATABASE / self._picc_walloon
        self.CE_IGN_TOP10V = self.IN_DATABASE / self._ce_ign_top10v

        self.VULNERABILITY_CSV = self.IN_CSV / "Vulnerability.csv"
        self.POINTS_CSV = self.IN_CSV / "Intermediate.csv"
        # self.PONDERATION_CSV = self.IN_CSV / "Ponderation.csv"

        self._CSVs = [self.VULNERABILITY_CSV, self.POINTS_CSV] #, self.PONDERATION_CSV]
        self._GPKGs= [self.CAPA_WALLOON, self.PICC_WALLOON]
        self._GDBs = [self.ORIGINAL_GDB]
        self._SHPs = [self.CE_IGN_TOP10V]
        self._ALLS = self._CSVs + self._GPKGs + self._GDBs + self._SHPs

        self.TMP_DIR = self.main_dir / "TEMP"

        self.TMP_DATABASE = self.TMP_DIR / "DATABASES"

        self.OUT_DIR = self.main_dir / "OUTPUT"

        self.create_paths()
        self.create_paths_scenario()

    def create_paths(self):
        """ Create the paths for the directories and files """

        if self._study_area is not None:

            self.Study_area:Path = Path(self._study_area)

            self.TMP_STUDYAREA = self.TMP_DATABASE / self.Study_area.stem
            self.TMP_VULN_DIR = self.TMP_STUDYAREA / "VULNERABILITY"
            self.TMP_RASTERS = self.TMP_VULN_DIR / "RASTERS"
            self.TMP_RASTERS_CODE = self.TMP_RASTERS / "Code"
            self.TMP_RASTERS_VULNE = self.TMP_RASTERS / "Vulne"

            self.OUT_STUDY_AREA = self.OUT_DIR / self.Study_area.stem

            self.SA = self.IN_STUDY_AREA / self.Study_area
            self.SA_DATABASE = self.TMP_STUDYAREA / "database.gpkg"
            self.SA_CAPA = self.TMP_STUDYAREA / "CaPa.gpkg"
            self.SA_PICC = self.TMP_STUDYAREA / "PICC.gpkg"
            self.SA_FINAL = self.TMP_STUDYAREA / "database_final.gpkg"
            self.SA_FINAL_V = self.TMP_STUDYAREA / "database_final_V.gpkg"
            self.SA_MASKED_RIVER = self.TMP_STUDYAREA / "CE_IGN_TOP10V.tiff"

            self.SA_VULN = self.TMP_VULN_DIR / "Vulnerability.tiff"
            self.SA_CODE = self.TMP_VULN_DIR / "Vulnerability_Code.tiff"

        else:
            self.Study_area = None
            self._scenario = None

            self.TMP_STUDYAREA = None
            self.TMP_VULN_DIR = None
            self.TMP_RASTERS = None
            self.TMP_RASTERS_CODE = None
            self.TMP_RASTERS_VULNE = None

            self.OUT_STUDY_AREA = None

            self.SA = None
            self.SA_DATABASE = None
            self.SA_CAPA = None
            self.SA_PICC = None
            self.SA_FINAL = None
            self.SA_FINAL_V = None
            self.SA_MASKED_RIVER = None

            self.SA_VULN = None
            self.SA_CODE = None

        self.create_paths_scenario()

        self.check_inputs()
        self.check_temporary()
        self.check_outputs()

    def create_paths_scenario(self):

        if self._scenario is not None:

            self.scenario:str = str(self._scenario)

            self.IN_SCEN_DIR = self.IN_WATER_DEPTH / self.SA.stem / self.scenario
            self.IN_RM_BUILD_DIR = self.IN_SCEN_DIR / "REMOVED_BUILDINGS"

            self.TMP_SCEN_DIR = self.TMP_VULN_DIR / self.scenario
            self.TMP_RM_BUILD_DIR = self.TMP_SCEN_DIR / "REMOVED_BUILDINGS"
            self.TMP_QFILES = self.TMP_SCEN_DIR / "Q_FILES"

            self.TMP_VULN = self.TMP_SCEN_DIR / "Vulnerability.tiff"
            self.TMP_CODE = self.TMP_SCEN_DIR / "Vulnerability_Code.tiff"

            self.OUT_SCEN_DIR = self.OUT_STUDY_AREA / self.scenario
            self.OUT_VULN = self.OUT_SCEN_DIR / "Vulnerability.tiff"
            self.OUT_CODE = self.OUT_SCEN_DIR / "Vulnerability_Code.tiff"
            self.OUT_MASKED_RIVER = self.OUT_SCEN_DIR / "Masked_River_extent.tiff"
            self.OUT_ACCEPT = self.OUT_SCEN_DIR / "Acceptability.tiff"
            self.OUT_ACCEPT_100M = self.OUT_SCEN_DIR / "Acceptability_100m.tiff"

        else:
            self.scenario = None

            self.IN_SCEN_DIR = None
            self.IN_RM_BUILD_DIR = None

            self.TMP_SCEN_DIR = None
            self.TMP_RM_BUILD_DIR = None
            self.TMP_QFILES = None

            self.TMP_VULN = None
            self.TMP_CODE = None

            self.OUT_SCEN_DIR = None
            self.OUT_VULN = None
            self.OUT_CODE = None
            self.OUT_MASKED_RIVER = None
            self.OUT_ACCEPT = None
            self.OUT_ACCEPT_100M = None

    @property
    def is_valid_inputs(self) -> bool:
        return self.check_inputs()

    @property
    def is_valid_study_area(self) -> bool:
        return self.SA.exists()

    @property
    def is_valid_vulnerability_csv(self) -> bool:
        return self.VULNERABILITY_CSV.exists()

    @property
    def is_valid_points_csv(self) -> bool:
        return self.POINTS_CSV.exists()

    @property
    def is_valid_ponderation_csv(self) -> bool:
        # NOTE: self.PONDERATION_CSV is currently commented out in __init__,
        # so this property would raise an AttributeError if used
        return self.PONDERATION_CSV.exists()

    def check_files(self) -> str:
        """ Return the missing mandatory files, one per line """

        files = ""
        for a in self._ALLS:
            if not a.exists():
                files += str(a) + "\n"

        return files

    def change_studyarea(self, Study_area:str = None) -> None:

        if Study_area is None:
            self._study_area = None
            self._scenario = None
        else:
            if Study_area in self.get_list_studyareas(with_suffix=True):
                self._study_area = Path(Study_area)
            else:
                logging.error("The study area does not exist in the study area directory")

        self.create_paths()

    def change_scenario(self, scenario:str) -> None:

        if scenario in self.get_list_scenarios():
            self._scenario = scenario
            self.create_paths_scenario()
            self.check_temporary()
            self.check_outputs()
        else:
            logging.error("The scenario does not exist in the water depth directory")

    def get_files_in_rm_buildings(self) -> list[Path]:
        return [Path(a) for a in glob.glob(str(self.IN_RM_BUILD_DIR / "*.shp"))]

    def get_files_in_rasters_vulne(self) -> list[Path]:
        return [Path(a) for a in glob.glob(str(self.TMP_RASTERS_VULNE / "*.tiff"))]

    def get_files_in_rasters_code(self) -> list[Path]:
        return [Path(a) for a in glob.glob(str(self.TMP_RASTERS_CODE / "*.tiff"))]

    def get_q_files(self) -> list[Path]:
        return [Path(a) for a in glob.glob(str(self.TMP_QFILES / "*.tif"))]

    def get_list_scenarios(self) -> list[str]:
        return [Path(a).stem for a in glob.glob(str(self.IN_WATER_DEPTH / self.SA.stem / "Scenario*"))]

    def get_list_studyareas(self, with_suffix:bool = False) -> list[str]:

        if with_suffix:
            return [Path(a).name for a in glob.glob(str(self.IN_STUDY_AREA / "*.shp"))]
        else:
            return [Path(a).stem for a in glob.glob(str(self.IN_STUDY_AREA / "*.shp"))]

    def get_sims_files_for_scenario(self) -> list[Path]:

        return [Path(a) for a in glob.glob(str(self.IN_SCEN_DIR / "*.tif"))]

    def get_sim_file_for_return_period(self, return_period:int) -> Path:

        sims = self.get_sims_files_for_scenario()

        if len(sims) == 0:
            logging.error("No simulations found")
            return None

        # Two naming conventions coexist: "..._T{rp}_h.tif" and "...T{rp}.tif"
        if "_h.tif" in sims[0].name:
            for cursim in sims:
                if cursim.stem.find("_T{}_".format(return_period)) != -1:
                    return cursim
        else:
            for cursim in sims:
                if cursim.stem.find("T{}".format(return_period)) != -1:
                    return cursim

        return None

    def get_return_periods(self) -> list[int]:

        sims = self.get_sims_files_for_scenario()

        if len(sims) == 0:
            logging.error("No simulations found")
            return None

        # NB: .stem excludes the suffix, so find("_h.tif")/find(".tif") return -1
        # and the slice end below is a negative index trimming the trailing characters
        if "_h.tif" in sims[0].name:
            idx_T = [cursim.stem.find("_T") for cursim in sims]
            idx_h = [cursim.stem.find("_h.tif") for cursim in sims]
            sims = [int(cursim.stem[idx_T[i]+2:idx_h[i]-1]) for i, cursim in enumerate(sims)]
        else:
            idx_T = [cursim.stem.find("T") for cursim in sims]
            idx_h = [cursim.stem.find(".tif") for cursim in sims]
            sims = [int(cursim.stem[idx_T[i]+1:idx_h[i]]) for i, cursim in enumerate(sims)]

        return sorted(sims)

    def get_ponderations(self) -> pd.DataFrame:
        """ Get the ponderation data from available simulations """

        rt = self.get_return_periods()

        if len(rt) == 0:
            logging.error("No simulations found")
            return None

        pond = []

        pond.append(1./float(rt[0]) + (1./float(rt[0]) - 1./float(rt[1]))/2.)
        for i in range(1, len(rt)-1):
            # pond.append((1./float(rt[i-1]) - 1./float(rt[i]))/2. + (1./float(rt[i]) - 1./float(rt[i+1]))/2.)
            pond.append((1./float(rt[i-1]) - 1./float(rt[i+1]))/2.)
        pond.append(1./float(rt[-1]) + (1./float(rt[-2]) - 1./float(rt[-1]))/2.)

        return pd.DataFrame(pond, columns=["Ponderation"], index=rt)

    def get_filepath_for_return_period(self, return_period:int) -> Path:

        return self.get_sim_file_for_return_period(return_period)

    def change_dir(self) -> None:
        os.chdir(self.main_dir)
        logging.info("Current directory: %s", os.getcwd())

    def restore_dir(self) -> None:
        os.chdir(self.old_dir)
        logging.info("Current directory: %s", os.getcwd())

    def check_inputs(self) -> bool:
        """
        Check if the input directories exist.

        Inputs cannot be created automatically. The user must provide them.
        """

        err = False
        if not self.IN_DATABASE.exists():
            logging.error("INPUT : The database directory does not exist")
            err = True

        if not self.IN_STUDY_AREA.exists():
            logging.error("INPUT : The study area directory does not exist")
            err = True

        if not self.IN_CSV.exists():
            logging.error("INPUT : The CSV directory does not exist")
            err = True

        if not self.IN_WATER_DEPTH.exists():
            logging.error("INPUT : The water depth directory does not exist")
            err = True

        if self.Study_area is not None:
            if not self.SA.exists():
                logging.error("INPUT : The study area file does not exist")
                err = True

        if not self.ORIGINAL_GDB.exists():
            logging.error("INPUT : The original gdb file does not exist - Please pull it from the SPW-ARNE")
            err = True

        if not self.CAPA_WALLOON.exists():
            logging.error("INPUT : The Cadastre Walloon file does not exist - Please pull it from the SPW")
            err = True

        if not self.PICC_WALLOON.exists():
            logging.error("INPUT : The PICC Walloon file does not exist - Please pull it from the SPW website")
            err = True

        if not self.CE_IGN_TOP10V.exists():
            logging.error("INPUT : The CE IGN top10v file does not exist - Please pull it from the IGN")
            err = True

        if self.scenario is None:
            logging.warning("The scenario has not been defined")
        else:
            if not self.IN_SCEN_DIR.exists():
                logging.error("The scenario directory does not exist")
                err = True

        return not err

    def check_temporary(self) -> bool:
        """
        Check if the temporary directories exist.

        If not, create them.
        """

        self.TMP_DIR.mkdir(parents=True, exist_ok=True)
        self.TMP_DATABASE.mkdir(parents=True, exist_ok=True)

        if self.Study_area is not None:
            self.TMP_STUDYAREA.mkdir(parents=True, exist_ok=True)
            self.TMP_VULN_DIR.mkdir(parents=True, exist_ok=True)

        if self.scenario is not None:
            self.TMP_SCEN_DIR.mkdir(parents=True, exist_ok=True)
            self.TMP_RM_BUILD_DIR.mkdir(parents=True, exist_ok=True)
            self.TMP_QFILES.mkdir(parents=True, exist_ok=True)

        return True

    def check_outputs(self) -> bool:
        """
        Check if the output directories exist.

        If not, create them.
        """

        self.OUT_DIR.mkdir(parents=True, exist_ok=True)

        if self.Study_area is not None:
            self.OUT_STUDY_AREA.mkdir(parents=True, exist_ok=True)

        if self.scenario is not None:
            self.OUT_SCEN_DIR.mkdir(parents=True, exist_ok=True)

        return True

    def check_database_creation(self) -> bool:
        """
        Check if the database files have been created.
        """

        if not self.SA_DATABASE.exists():
            logging.error("The database file does not exist")
            return False

        if not self.SA_CAPA.exists():
            logging.error("The Cadastre Walloon file does not exist")
            return False

        if not self.SA_PICC.exists():
            logging.error("The PICC Walloon file does not exist")
            return False

        if not self.SA_FINAL.exists():
            logging.error("The final database file does not exist")
            return False

        if not self.SA_FINAL_V.exists():
            logging.error("The final database with vulnerability levels does not exist")
            return False

        return True

    def check_before_database_creation(self) -> bool:
        """ Check if the necessary files are present before the database creation """

        if not self.is_valid_inputs:
            logging.error("There are missing input directories - Please check the input directories and the logs carefully")
            return False

        if not self.is_valid_study_area:
            logging.error("The study area file does not exist - Please create it")
            return False

        if not self.is_valid_vulnerability_csv:
            logging.error("The vulnerability CSV file does not exist - Please create it")
            return False

        return True

    def check_before_rasterize(self) -> bool:

        if not self.SA_FINAL_V.exists():
            logging.error("The final database with vulnerability levels does not exist")
            return False

        if not self.SA.exists():
            logging.error("The study area file does not exist")
            return False

        return True

    def check_before_vulnerability(self) -> bool:

        if not self.SA.exists():
            logging.error("The area of interest does not exist")
            return False

        if not self.IN_WATER_DEPTH.exists():
            logging.error("The water depth directory does not exist")
            return False

        if not self.IN_SCEN_DIR.exists():
            logging.error("The scenario directory does not exist in the water depth directory")
            return False

        if not self.SA_MASKED_RIVER.exists():
            logging.error("The IGN raster does not exist")
            return False

        return True

    def check_vuln_code_sa(self) -> bool:

        if not self.SA_VULN.exists():
            logging.error("The vulnerability raster file does not exist")
            return False

        if not self.SA_CODE.exists():
            logging.error("The vulnerability code raster file does not exist")
            return False

        return True

    def check_vuln_code_scenario(self) -> bool:

        if not self.TMP_VULN.exists():
            logging.error("The vulnerability raster file does not exist")
            return False

        if not self.TMP_CODE.exists():
            logging.error("The vulnerability code raster file does not exist")
            return False

        return True

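# --- Editor's note: an illustrative sketch, not part of the package. ---
# Minimal use of Accept_Manager, assuming the "Data" tree described in the
# class docstring and a scenario folder named "Scenario1" (hypothetical names):
def _example_manager_usage():
    manager = Accept_Manager(main_dir='Data',
                             Study_area='Bassin_Vesdre',
                             scenario='Scenario1')

    if manager.check_before_database_creation():
        rt = manager.get_return_periods()        # e.g. [25, 50, 100]
        pond = manager.get_ponderations()

        # get_ponderations() assigns each return period T_i the width of its
        # probability band:
        #   w_0 = 1/T_0 + (1/T_0 - 1/T_1)/2
        #   w_i = (1/T_{i-1} - 1/T_{i+1})/2          for 0 < i < N
        #   w_N = 1/T_N + (1/T_{N-1} - 1/T_N)/2
        # e.g. rt = [25, 50, 100] gives weights [0.05, 0.015, 0.015].
        print(pond)
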
# Step 1, Clip GDB data

def gpd_clip(layer:str,
             file_path:str,
             Study_Area:str,
             geopackage:str):
    """
    Clip the input data to the selected basin and save it in a separate database.

    :param layer: the layer name in the GDB file
    :param file_path: the path to the GDB file
    :param Study_Area: the path to the study area shapefile
    :param geopackage: the path to the geopackage file
    """

    layer = str(layer)
    file_path = str(file_path)
    Study_Area = str(Study_Area)
    geopackage = str(geopackage)

    St_Area = gpd.read_file(Study_Area)

    logging.info(layer)

    # The data is clipped while it is read
    # **It is more efficient than reading the entire data and then clipping it**
    df:gpd.GeoDataFrame = gpd.read_file(file_path, layer=layer, mask=St_Area)

    # Force Lambert72 -> EPSG:31370
    df.to_crs("EPSG:31370", inplace=True)

    df.to_file(geopackage, layer=layer, mode='w')

    return "Saved the clipped " + str(layer) + " to GPKG"

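# --- Editor's note: an illustrative sketch, not part of the package. ---
# gpd_clip() handles one layer at a time, so the layers of the original GDB
# can be clipped in a plain loop (the package also ships a Parallels.py helper
# for running such per-layer jobs concurrently). Assumes fiona is installed:
def _example_clip_all_layers(manager:Accept_Manager):
    import fiona
    for layer in fiona.listlayers(str(manager.ORIGINAL_GDB)):
        gpd_clip(layer, manager.ORIGINAL_GDB, manager.SA, manager.SA_DATABASE)
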
def data_modification(input_database:str,
                      layer:str,
                      output_database:str,
                      picc:gpd.GeoDataFrame,
                      capa:gpd.GeoDataFrame):
    """
    Apply the data modifications as described in the LEMA report

    FIXME : Add more doc in this docstring

    :param input_database: the path to the input database
    :param layer: the layer name in the database
    :param output_database: the path to the output database
    :param picc: the PICC Walloon file -- Preloaded
    :param capa: the Cadastre Walloon file -- Preloaded
    """

    df1:gpd.GeoDataFrame
    df2:gpd.GeoDataFrame

    LAYERS_WALOUS = ["WALOUS_2018_LB72_112",
                     "WALOUS_2018_LB72_31",
                     "WALOUS_2018_LB72_32",
                     "WALOUS_2018_LB72_331",
                     "WALOUS_2018_LB72_332",
                     "WALOUS_2018_LB72_333",
                     "WALOUS_2018_LB72_34"]

    input_database = str(input_database)
    layer = str(layer)
    output_database = str(output_database)

    df:gpd.GeoDataFrame = gpd.read_file(input_database, layer=layer)
    x1, y1 = df.shape
    a = df.geom_type.unique()
    x, = a.shape
    if x1 > 0:
        if layer in LAYERS_WALOUS:  # WALOUS layers are replaced by PICC buildings

            assert picc.crs == df.crs, "CRS of PICC and input data do not match"

            df1 = gpd.sjoin(picc, df, how="inner", predicate="intersects")
            cols = df.columns
            cols = np.append(cols, "GEOREF_ID")
            cols = np.append(cols, "NATUR_CODE")
            df1 = df1[cols]
            df1.to_file(output_database, layer=layer)
        elif layer == "BDREF_DGO3_PASH__SCHEMA_STATIONS_EPU":  # Replace BDREF stations with the AJOUT_PDET data sent by Perrine
            df1 = gpd.read_file(os.getcwd() + "//INPUT//EPU_STATIONS_NEW//AJOUT_PDET_EPU_DG03_STATIONS.shp")

            assert df1.crs == df.crs, "CRS of AJOUT_PDET and input data do not match"

            df2 = gpd.sjoin(picc, df1, how="inner", predicate="intersects")
            df2.to_file(output_database, layer=layer)
        elif layer == "INFRASIG_SOINS_SANTE__ETAB_AINES":

            assert capa.crs == df.crs, "CRS of CaPa and input data do not match"

            df1 = gpd.sjoin(capa, df, how="inner", predicate="intersects")
            cols = df.columns
            cols = np.append(cols, "CaPaKey")
            df1 = df1[cols]
            df2 = gpd.sjoin(picc, df1, how="inner", predicate="intersects")
            cols = np.append(cols, "GEOREF_ID")
            cols = np.append(cols, "NATUR_CODE")
            #df2 = df2[cols]
            df2.to_file(output_database, layer=layer)

        elif a[0] == "Point" and layer != "BDREF_DGO3_PASH__SCHEMA_STATIONS_EPU" and layer != "INFRASIG_SOINS_SANTE__ETAB_AINES":

            assert capa.crs == df.crs, "CRS of CaPa and input data do not match"
            assert picc.crs == df.crs, "CRS of PICC and input data do not match"

            df1 = gpd.sjoin(capa, df, how="inner", predicate="intersects")
            cols = df.columns
            cols = np.append(cols, "CaPaKey")
            df1 = df1[cols]
            df2 = gpd.sjoin(picc, df1, how="inner", predicate="intersects")
            cols = np.append(cols, "GEOREF_ID")
            cols = np.append(cols, "NATUR_CODE")
            df2 = df2[cols]
            df2.to_file(output_database, layer=layer)
        elif layer == "Hab_2018_CABU":
            df1 = df[df["NbsHabTOT"] > 0]
            df1.to_file(output_database, layer=layer)
        elif layer == "INFRASIG_ROUTE_RES_ROUTIER_TE_AXES":
            df1 = df.buffer(6, cap_style=2)   # 6 m buffer around the road axes
            df1.to_file(output_database, layer=layer)
        else:
            df.to_file(output_database, layer=layer)
    else:
        logging.info("Skipped " + str(layer) + " due to no polygon in the study area")

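# --- Editor's note: an illustrative sketch, not part of the package. ---
# data_modification() expects the PICC and CaPa data to be loaded once and
# reused across layers; a minimal driver loop (layer names are hypothetical):
def _example_modify_all_layers(manager:Accept_Manager, layers:list[str]):
    picc = gpd.read_file(manager.SA_PICC)   # preloaded once, reused for each layer
    capa = gpd.read_file(manager.SA_CAPA)
    for layer in layers:
        data_modification(manager.SA_DATABASE, layer, manager.SA_FINAL, picc, capa)
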
def vector_to_raster(layer:str,
                     vector_input:Path,
                     extent:Path,
                     attribute:str,
                     pixel_size:float):
    """
    Convert a vector layer to a raster tiff file

    :param layer: the layer name in the GDB file
    :param vector_input: the path to the vector file
    :param extent: the path to the extent file
    :param attribute: the attribute to rasterize
    :param pixel_size: the pixel size of the raster
    """

    old_dir = os.getcwd()

    layer = str(layer)
    vector_input = Path(vector_input)
    extent = Path(extent)
    attribute = str(attribute)
    pixel_size = float(pixel_size)

    OUT_DIR = vector_input.parent / "VULNERABILITY/RASTERS" / attribute
    OUT_NAME = layer + ".tiff"

    OUT_DIR.mkdir(parents=True, exist_ok=True)

    if (OUT_DIR / OUT_NAME).exists():
        os.remove(OUT_DIR / OUT_NAME)

    os.chdir(OUT_DIR)

    NoData_value = 0

    extent_ds:ogr.DataSource = ogr.Open(str(extent))
    extent_layer = extent_ds.GetLayer()

    x_min, x_max, y_min, y_max = extent_layer.GetExtent()

    # Round the extent to whole metres
    x_min = float(int(x_min))
    x_max = float(np.ceil(x_max))
    y_min = float(int(y_min))
    y_max = float(np.ceil(y_max))

    # Open the data source and read the extent
    source_ds:ogr.DataSource = ogr.Open(str(vector_input))
    source_layer = source_ds.GetLayer(layer)

    # Create the destination data source
    x_res = int((x_max - x_min) / pixel_size)
    y_res = int((y_max - y_min) / pixel_size)
    target_ds:gdal.Dataset = gdal.GetDriverByName('GTiff').Create(str(OUT_NAME),
                                                                  x_res, y_res, 1,
                                                                  gdal.GDT_Byte,
                                                                  options=["COMPRESS=LZW"])

    target_ds.SetGeoTransform((x_min, pixel_size, 0, y_max, 0, -pixel_size))
    srs = osr.SpatialReference()
    srs.ImportFromEPSG(31370)   # Belgian Lambert 72
    target_ds.SetProjection(srs.ExportToWkt())

    band = target_ds.GetRasterBand(1)
    band.SetNoDataValue(NoData_value)

    # Rasterize the areas
    gdal.RasterizeLayer(target_ds, [1], source_layer, options=["ATTRIBUTE=" + attribute, "ALL_TOUCHED=TRUE"])
    target_ds = None

    os.chdir(old_dir)

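# --- Editor's note: an illustrative sketch, not part of the package. ---
# vector_to_raster() writes one GeoTIFF per (layer, attribute) pair under
# <vector_input>.parent/VULNERABILITY/RASTERS/<attribute>/<layer>.tiff; e.g.
# burning the "Vulne" and "Code" attributes of each layer at an assumed 1 m
# pixel size:
def _example_rasterize_all(manager:Accept_Manager, layers:list[str]):
    for layer in layers:
        for attribute in ("Vulne", "Code"):
            vector_to_raster(layer, manager.SA_FINAL_V, manager.SA, attribute, 1.)
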
def Comp_Vulnerability(dirsnames:Accept_Manager):
    """
    Compute the vulnerability for the Study Area

    This function **will not modify** the data by the removed buildings/scenarios.

    :param dirsnames: the Accept_Manager object from the calling function
    """

    rasters_vuln = dirsnames.get_files_in_rasters_vulne()
    rasters_code = dirsnames.get_files_in_rasters_code()

    logging.info("Number of files: %d", len(rasters_vuln))

    ds:gdal.Dataset = gdal.Open(str(rasters_vuln[0]))
    ds1:gdal.Dataset = gdal.Open(str(rasters_code[0]))

    tmp_vuln = np.array(ds.GetRasterBand(1).ReadAsArray())
    tmp_code = np.array(ds1.GetRasterBand(1).ReadAsArray())

    x, y = tmp_vuln.shape

    logging.info("Computing Vulnerability")

    array_vuln = np.zeros((x, y), dtype=np.int8)
    array_code = np.zeros((x, y), dtype=np.int8)

    for i in tqdm(range(len(rasters_vuln))):
        logging.info("Computing layer {} / {}".format(i, len(rasters_vuln)))
        ds = gdal.Open(str(rasters_vuln[i]))
        ds1 = gdal.Open(str(rasters_code[i]))

        tmp_vuln = ds.GetRasterBand(1).ReadAsArray()
        tmp_code = ds1.GetRasterBand(1).ReadAsArray()

        # Keep the current layer wherever its value is at least the accumulated one
        ij = np.where(tmp_vuln >= array_vuln)
        array_vuln[ij] = tmp_vuln.max()
        array_code[ij] = tmp_code.max()

    # Pixels untouched by any layer default to the lowest vulnerability
    ij = np.where(array_vuln == 0)
    array_vuln[ij] = 1
    array_code[ij] = 1

    dst_filename = str(dirsnames.SA_VULN)
    y_pixels, x_pixels = array_vuln.shape   # (rows, cols)

    driver = gdal.GetDriverByName('GTiff')
    # NB: Create() expects (name, xsize, ysize, bands, eType)
    dataset = driver.Create(dst_filename, x_pixels, y_pixels, 1, gdal.GDT_Byte, options=["COMPRESS=LZW"])
    dataset.GetRasterBand(1).WriteArray(array_vuln.astype(np.int8))
    # Georeference the output with the GeoTransform and Projection of the last opened raster
    geotrans = ds.GetGeoTransform()
    proj = ds.GetProjection()
    dataset.SetGeoTransform(geotrans)
    dataset.SetProjection(proj)
    dataset.FlushCache()
    dataset = None

    dst_filename = str(dirsnames.SA_CODE)
    y_pixels, x_pixels = array_code.shape   # (rows, cols)
    driver = gdal.GetDriverByName('GTiff')
    dataset = driver.Create(dst_filename, x_pixels, y_pixels, 1, gdal.GDT_Byte, options=["COMPRESS=LZW"])
    dataset.GetRasterBand(1).WriteArray(array_code.astype(np.int8))
    geotrans = ds.GetGeoTransform()
    proj = ds.GetProjection()
    dataset.SetGeoTransform(geotrans)
    dataset.SetProjection(proj)
    dataset.FlushCache()
    dataset = None

    logging.info("Computed Vulnerability for the Study Area - Done")

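# --- Editor's note: an illustrative sketch, not part of the package. ---
# The GTiff-writing boilerplate above recurs throughout this module; a small
# helper of this shape (hypothetical, not used by the package) would factor
# it out:
def _write_geotiff(dst_filename:str, array:np.ndarray, geotrans, proj,
                   eType:int = gdal.GDT_Byte):
    y_pixels, x_pixels = array.shape                    # (rows, cols)
    driver = gdal.GetDriverByName('GTiff')
    dataset = driver.Create(dst_filename, x_pixels, y_pixels, 1, eType,
                            options=["COMPRESS=LZW"])
    dataset.GetRasterBand(1).WriteArray(array)
    dataset.SetGeoTransform(geotrans)                   # georeference the output
    dataset.SetProjection(proj)
    dataset.FlushCache()                                # flush to disk
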
def Comp_Vulnerability_Scen(dirsnames:Accept_Manager):
    """ Compute the vulnerability for the scenario

    This function **will modify** the data by the removed buildings/scenarios.

    FIXME: It could be interesting to permit the user to provide tiff files for the removed buildings and other scenarios.

    :param dirsnames: the Accept_Manager object from the calling function
    """

    array_vuln = gdal.Open(str(dirsnames.SA_VULN))
    # Georeference the outputs with the GeoTransform and Projection of the study-area raster
    geotrans = array_vuln.GetGeoTransform()
    proj = array_vuln.GetProjection()

    array_vuln = np.array(array_vuln.GetRasterBand(1).ReadAsArray())

    array_code = gdal.Open(str(dirsnames.SA_CODE))
    array_code = np.array(array_code.GetRasterBand(1).ReadAsArray())

    Rbu = dirsnames.get_files_in_rm_buildings()

    if len(Rbu) > 0:
        for curfile in Rbu:
            array_mod = gdal.Open(str(curfile))
            array_mod = np.array(array_mod.GetRasterBand(1).ReadAsArray())

            # Removed buildings fall back to the lowest vulnerability
            ij = np.where(array_mod == 1)
            array_vuln[ij] = 1
            array_code[ij] = 1

    dst_filename = str(dirsnames.TMP_VULN)
    y_pixels, x_pixels = array_vuln.shape   # (rows, cols)

    driver = gdal.GetDriverByName('GTiff')
    dataset = driver.Create(dst_filename, x_pixels, y_pixels, 1, gdal.GDT_Byte, options=["COMPRESS=LZW"])
    dataset.GetRasterBand(1).WriteArray(array_vuln.astype(np.int8))
    dataset.SetGeoTransform(geotrans)
    dataset.SetProjection(proj)
    dataset.FlushCache()
    dataset = None

    dst_filename = str(dirsnames.TMP_CODE)
    y_pixels, x_pixels = array_code.shape   # (rows, cols)
    driver = gdal.GetDriverByName('GTiff')
    dataset = driver.Create(dst_filename, x_pixels, y_pixels, 1, gdal.GDT_Byte, options=["COMPRESS=LZW"])
    dataset.GetRasterBand(1).WriteArray(array_code.astype(np.int8))
    dataset.SetGeoTransform(geotrans)
    dataset.SetProjection(proj)
    dataset.FlushCache()
    dataset = None

    logging.info("Computed Vulnerability for the scenario")

def match_vuln_modrec(inRas:Path, outRas:Path, MODREC:Path):
    """
    Clip the raster to the MODREC/simulation extent

    :param inRas: the input raster file
    :param outRas: the output raster file
    :param MODREC: the MODREC/simulation extent file
    """

    inRas = str(inRas)
    outRas = str(outRas)
    MODREC = str(MODREC)

    data = gdal.Open(MODREC, gdalconst.GA_ReadOnly)
    geoTransform = data.GetGeoTransform()
    minx = geoTransform[0]
    maxy = geoTransform[3]
    maxx = minx + geoTransform[1] * data.RasterXSize
    miny = maxy + geoTransform[5] * data.RasterYSize
    ds = gdal.Open(inRas)
    ds = gdal.Translate(outRas, ds, projWin=[minx, maxy, maxx, miny])
    ds = None

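# --- Editor's note: an illustrative sketch, not part of the package. ---
# gdal.Translate with projWin=[ulx, uly, lrx, lry] crops in georeferenced
# coordinates, so match_vuln_modrec() clips the study-area raster to the
# simulation footprint, e.g. (paths hypothetical):
#
#   match_vuln_modrec(manager.SA_VULN, manager.TMP_VULN, sim_file)
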
def VulMod(dirsnames:Accept_Manager,
           model_h:np.ndarray,
           vulnerability:np.ndarray,
           interval:int,
           geo_projection):
    """
    Compute the local acceptability based on:
        - the vulnerability
        - the water depth
        - the matrices

    :param dirsnames: the Accept_Manager object from the calling function
    :param model_h: the water depth matrix
    :param vulnerability: the vulnerability matrix
    :param interval: the return period
    :param geo_projection: the geotransform and the projection - tuple extracted from another raster file
    """

    logging.info(interval)

    Qfile = pd.read_csv(dirsnames.POINTS_CSV)

    Qfile = Qfile[Qfile["Interval"] == interval]
    Qfile = Qfile.reset_index()

    x, y = vulnerability.shape
    accept = np.zeros((x, y))

    ij_1 = np.where(vulnerability == 1)
    ij_2 = np.where(vulnerability == 2)
    ij_3 = np.where(vulnerability == 3)
    ij_4 = np.where(vulnerability == 4)
    ij_5 = np.where(vulnerability == 5)

    # Water depth classes (m) used to look up the acceptability matrices
    bounds = [(0., 0.02), (0.02, 0.3), (0.3, 1), (1, 2.5), (2.5, 1000)]

    # Row 4 of the filtered CSV holds the scores for vulnerability 1, ..., row 0 for vulnerability 5
    accept_1 = [Qfile["h-0"][4], Qfile["h-0.02"][4], Qfile["h-0.3"][4], Qfile["h-1"][4], Qfile["h-2.5"][4]]
    accept_2 = [Qfile["h-0"][3], Qfile["h-0.02"][3], Qfile["h-0.3"][3], Qfile["h-1"][3], Qfile["h-2.5"][3]]
    accept_3 = [Qfile["h-0"][2], Qfile["h-0.02"][2], Qfile["h-0.3"][2], Qfile["h-1"][2], Qfile["h-2.5"][2]]
    accept_4 = [Qfile["h-0"][1], Qfile["h-0.02"][1], Qfile["h-0.3"][1], Qfile["h-1"][1], Qfile["h-2.5"][1]]
    accept_5 = [Qfile["h-0"][0], Qfile["h-0.02"][0], Qfile["h-0.3"][0], Qfile["h-1"][0], Qfile["h-2.5"][0]]

    accept[:,:] = -99999
    for ij, loc_accept in zip([ij_1, ij_2, ij_3, ij_4, ij_5], [accept_1, accept_2, accept_3, accept_4, accept_5]):
        if len(ij[0]) > 0:
            for idx, (min_bound, max_bound) in enumerate(bounds):
                loc_ij = np.where((model_h[ij] > min_bound) & (model_h[ij] <= max_bound))
                accept[ij[0][loc_ij], ij[1][loc_ij]] = loc_accept[idx]

    # Save the raster
    dst_filename = str(dirsnames.TMP_QFILES / "Q{}.tif".format(interval))

    y_pixels, x_pixels = accept.shape   # (rows, cols)
    driver = gdal.GetDriverByName('GTiff')
    dataset = driver.Create(dst_filename, x_pixels, y_pixels, 1, gdal.GDT_Float32, options=["COMPRESS=LZW"])
    dataset.GetRasterBand(1).WriteArray(accept.astype(np.float32))

    geotrans, proj = geo_projection
    dataset.SetGeoTransform(geotrans)
    dataset.SetProjection(proj)
    dataset.FlushCache()
    dataset = None
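# --- Editor's note: an illustrative sketch, not part of the package. ---
# VulMod() writes one "Q{T}.tif" acceptability raster per return period; the
# final acceptability raster is presumably the ponderation-weighted sum of
# those files (the combination itself lives elsewhere in the package):
def _example_combine_q_files(manager:Accept_Manager) -> np.ndarray:
    pond = manager.get_ponderations()
    total = None
    for rt in manager.get_return_periods():
        ds = gdal.Open(str(manager.TMP_QFILES / "Q{}.tif".format(rt)))
        q = ds.GetRasterBand(1).ReadAsArray().astype(np.float32)
        w = float(pond.loc[rt, "Ponderation"])
        total = q * w if total is None else total + q * w
    return total
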
def shp_to_raster(vector_fn:str, raster_fn:str, pixel_size:float = 1.):
    """
    Convert a vector layer to a raster tiff file

    :param vector_fn: the path to the vector file
    :param raster_fn: the path to the raster file
    :param pixel_size: the pixel size of the raster
    """

    # Force the input to be a string
    vector_fn = str(vector_fn)
    raster_fn = str(raster_fn)

    NoData_value = np.nan
    # Open the data source and read the extent
    source_ds = ogr.Open(vector_fn)
    source_layer = source_ds.GetLayer()
    x_min, x_max, y_min, y_max = source_layer.GetExtent()

    # Round the extent to whole metres
    x_min = float(int(x_min))
    x_max = float(np.ceil(x_max))
    y_min = float(int(y_min))
    y_max = float(np.ceil(y_max))

    # Create the destination data source
    x_res = int((x_max - x_min) / pixel_size)
    y_res = int((y_max - y_min) / pixel_size)
    target_ds = gdal.GetDriverByName('GTiff').Create(raster_fn, x_res, y_res, 1, gdal.GDT_Float64,
                                                     options=["COMPRESS=LZW"])

    target_ds.SetGeoTransform((x_min, pixel_size, 0, y_max, 0, -pixel_size))
    srs = osr.SpatialReference()
    srs.ImportFromEPSG(31370)   # Belgian Lambert 72
    target_ds.SetProjection(srs.ExportToWkt())
    band = target_ds.GetRasterBand(1)
    band.SetNoDataValue(NoData_value)
    # Rasterize the areas, burning 1 into every touched pixel
    gdal.RasterizeLayer(target_ds, [1], source_layer, None, None, [1], options=["ALL_TOUCHED=TRUE"])
    target_ds = None
    vector_fn = raster_fn = None
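# --- Editor's note: an illustrative sketch, not part of the package. ---
# shp_to_raster() is the generic mask rasterizer, e.g. to build the river
# extent mask from the clipped IGN shapefile at the default 1 m pixel size
# (paths hypothetical):
#
#   shp_to_raster(manager.CE_IGN_TOP10V, manager.SA_MASKED_RIVER, pixel_size=1.)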