wolfhece 2.1.23__py3-none-any.whl → 2.1.27__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,1427 @@
+ import geopandas as gpd
+ import pandas as pd
+ import numpy as np
+ from osgeo import gdal, ogr, osr, gdalconst
+ import os
+ import glob
+ from pathlib import Path
+ import logging
+ from tqdm import tqdm
+ from pyogrio import list_layers, read_dataframe
+ from enum import Enum
+ import numba as nb
+ from numba import cuda
+
+ ENGINE = 'pyogrio'  # or 'fiona' -- pyogrio is faster
+ EXTENT = '.gpkg'    # file extension used for the intermediate vector layers
+ class Modif_Type(Enum):
+     """
+     Enum class for the type of modification
+     """
+
+     WALOUS = 'Walous layers changed to PICC buildings'
+     POINT2POLY_EPURATION = 'Change BDREF based on AJOUT_PDET sent by Perrine (SPI)'
+     POINT2POLY_PICC = 'Convert the points to polygons based on PICC'
+     POINT2POLY_CAPAPICC = 'Convert the points to polygons based on PICC and CaPa'
+     INHABITED = 'Select only inhabited buildings'
+     ROAD = 'Create a buffer around the roads'
+     COPY = 'Copy the data'
+
+ class Vulnerability_csv():
+
+     def __init__(self, file:Path) -> None:
+         self.file = file
+         self.data = pd.read_csv(file, sep=",", encoding='latin-1')
+
+     def get_layers(self) -> list:
+         return [a[1] for a in self.data["Path"].str.split('/')]
+
+     def get_vulnerability_level(self, layer:str) -> str:
+         idx = self.get_layers().index(layer)
+         return self.data.iloc[idx]["Vulne"]
+
+     def get_vulnerability_code(self, layer:str) -> str:
+         idx = self.get_layers().index(layer)
+         return self.data.iloc[idx]["Code"]
+
+
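+ # Illustrative sketch (not from the released file): querying the Vulnerability.csv
+ # mapping. The CSV path follows the directory layout documented in Accept_Manager.
+ def _example_vulnerability_lookup(csv_file:Path = Path('Data/INPUT/CSVs/Vulnerability.csv')) -> dict:
+     vuln_csv = Vulnerability_csv(csv_file)
+     # Map each layer name to its (level, code) pair
+     return {layer: (vuln_csv.get_vulnerability_level(layer),
+                     vuln_csv.get_vulnerability_code(layer))
+             for layer in vuln_csv.get_layers()}
+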
+ def get_data_type(fname:Path):
+     """ Get the data type of the input file from its extension """
+
+     fname = Path(fname)
+     if fname.name.endswith('.gpkg'):
+         return 'GPKG'
+     elif fname.name.endswith('.shp'):
+         return 'ESRI Shapefile'
+     elif fname.name.endswith('.gdb'):
+         return 'OpenFileGDB'
+     else:
+         return None
+
+ def cleaning_directory(dir:Path):
+     """ Clean the directory by removing all its files """
+
+     logging.info("Cleaning the directory {}".format(dir))
+
+     files_in_output = list(dir.iterdir())
+     for item in files_in_output:
+         if item.is_file():
+             os.remove(item)
+
+ class Accept_Manager():
+     """
+     Structure to store the directories and names of the files.
+
+     In the main directory, the following directories are mandatory/created:
+         - INPUT : filled by the user - contains the input data
+         - TEMP  : created by the script - contains the temporary data for the study area
+         - OUTPUT: created by the script - contains the output data for each scenario of the study area
+
+     The INPUT directory contains the following subdirectories:
+         - DATABASE: contains the data for the **entire Walloon region**
+             - Cadastre_Walloon.gpkg: the Cadastre Walloon file
+             - GT_Resilence_dataRisques202010.gdb: the original gdb file from SPW - GT Resilience
+             - PICC_vDIFF.gdb: the PICC Walloon file
+             - CE_IGN_TOP10V: the IGN top10v shapefile
+             - EPU_STATIONS_NEW:
+                 - AJOUT_PDET_EPU_DG03_STATIONS.shp: the EPU stations shapefile
+         - STUDY_AREA: contains the study area shapefiles - one for each study area - e.g. Bassin_Vesdre.shp
+         - CSVs: contains the CSV files
+             - Intermediate.csv: contains the matrices data for the acceptability computation
+             # - Ponderation.csv: contains the ponderation data for the acceptability computation
+             - Vulnerability.csv: contains the mapping between layers and vulnerability levels - a code value is also provided
+         - WATER_DEPTH: contains the water depth data for each scenario
+             - Study_area1:
+                 - Scenario1
+                 - Scenario2
+                 - ...
+                 - ScenarioN
+             - Study_area2:
+                 - Scenario1
+                 - Scenario2
+                 - ...
+                 - ScenarioN
+             - ...
+             - Study_areaN:
+                 - Scenario1
+                 - Scenario2
+                 - ...
+                 - ScenarioN
+
+     The TEMP directory contains the following subdirectories:
+         - DATABASES: contains the temporary data for each study area
+             - Study_area1:
+                 - database.gpkg: the clipped database
+                 - CaPa.gpkg: the clipped Cadastre Walloon file
+                 - PICC.gpkg: the clipped PICC Walloon file
+                 - database_final.gpkg: the final database
+                 - database_final_V.gpkg: the final database with vulnerability levels
+                 - CE_IGN_TOP10V.tiff: the IGN top10v raster file
+                 - Masked_River_extent.tiff: the river extent raster file from IGN
+                 - VULNERABILITY: the vulnerability data
+                     - RASTERS:
+                         - Code : one file for each layer
+                         - Vulne : one file for each layer
+                     - Scenario1:
+
+     """
+
+     def __init__(self,
+                  main_dir:str = 'Data',
+                  Study_area:str = 'Bassin_Vesdre.shp',
+                  scenario = None,
+                  Original_gdb:str = 'GT_Resilence_dataRisques202010.gdb',
+                  CaPa_Walloon:str = 'Cadastre_Walloon.gpkg',
+                  PICC_Walloon:str = 'PICC_vDIFF.gdb',
+                  CE_IGN_top10v:str = 'CE_IGN_TOP10V/CE_IGN_TOP10V.shp',
+                  EPU_Stations:str = 'AJOUT_PDET_EPU_DG03_STATIONS.shp'
+                  ) -> None:
+
+         self.old_dir:Path = Path(os.getcwd())
+
+         self.main_dir:Path = Path(main_dir)
+
+         # If the path is not absolute, make it relative to the current directory
+         if not self.main_dir.is_absolute():
+             self.main_dir = Path(os.getcwd()) / self.main_dir
+
+         self._study_area = Study_area
+         if Study_area is not None:
+             if not self._study_area.endswith('.shp'):
+                 self._study_area += '.shp'
+
+         self._scenario = scenario
+         self._original_gdb = Original_gdb
+         self._capa_walloon = CaPa_Walloon
+         self._picc_walloon = PICC_Walloon
+         self._ce_ign_top10v = CE_IGN_top10v
+
+         self.IN_DIR = self.main_dir / "INPUT"
+         self.IN_DATABASE = self.IN_DIR / "DATABASE"
+         self.IN_STUDY_AREA = self.IN_DIR / "STUDY_AREA"
+         self.IN_CSV = self.IN_DIR / "CSVs"
+         self.IN_WATER_DEPTH = self.IN_DIR / "WATER_DEPTH"
+         self.IN_EPU_STATIONS = self.IN_DIR / "EPU_STATIONS_NEW"
+
+         self.ORIGINAL_GDB = self.IN_DATABASE / self._original_gdb
+         self.CAPA_WALLOON = self.IN_DATABASE / self._capa_walloon
+         self.PICC_WALLOON = self.IN_DATABASE / self._picc_walloon
+         self.CE_IGN_TOP10V = self.IN_DATABASE / self._ce_ign_top10v
+         self.EPU_STATIONS = self.IN_EPU_STATIONS / EPU_Stations
+
+         self.VULNERABILITY_CSV = self.IN_CSV / "Vulnerability.csv"
+         self.POINTS_CSV = self.IN_CSV / "Intermediate.csv"
+         # self.PONDERATION_CSV = self.IN_CSV / "Ponderation.csv"
+
+         self._CSVs = [self.VULNERABILITY_CSV, self.POINTS_CSV] #, self.PONDERATION_CSV]
+         self._GPKGs = [self.CAPA_WALLOON, self.PICC_WALLOON]
+         self._GDBs = [self.ORIGINAL_GDB]
+         self._SHPs = [self.CE_IGN_TOP10V, self.EPU_STATIONS]
+         self._ALLS = self._CSVs + self._GPKGs + self._GDBs + self._SHPs
+
+         self.TMP_DIR = self.main_dir / "TEMP"
+
+         self.OUT_DIR = self.main_dir / "OUTPUT"
+
+         self.points2polys = []
+         self.lines2polys = []
+
+         self.create_paths()
+         self.create_paths_scenario()
+
+     def create_paths(self):
+         """ Create the paths for the directories and files """
+
+         self.points2polys = []
+         self.lines2polys = []
+
+         if self._study_area is not None:
+
+             self.Study_area:Path = Path(self._study_area)
+
+             self.TMP_STUDYAREA = self.TMP_DIR / self.Study_area.stem
+             self.TMP_DATABASE = self.TMP_STUDYAREA / "DATABASES"
+
+             self.TMP_CLIPGDB = self.TMP_DATABASE / "CLIP_GDB"
+             self.TMP_CADASTER = self.TMP_DATABASE / "CLIP_CADASTER"
+             self.TMP_PICC = self.TMP_DATABASE / "CLIP_PICC"
+             self.TMP_IGNCE = self.TMP_DATABASE / "CLIP_IGN_CE"
+             self.TMP_WMODIF = self.TMP_DATABASE / "WITH_MODIF"
+             self.TMP_CODEVULNE = self.TMP_DATABASE / "CODE_VULNE"
+
+             self.TMP_VULN_DIR = self.TMP_STUDYAREA / "VULNERABILITY"
+             self.TMP_RASTERS = self.TMP_VULN_DIR / "RASTERS"
+             self.TMP_RASTERS_CODE = self.TMP_RASTERS / "Code"
+             self.TMP_RASTERS_VULNE = self.TMP_RASTERS / "Vulne"
+
+             self.OUT_STUDY_AREA = self.OUT_DIR / self.Study_area.stem
+
+             self.SA = self.IN_STUDY_AREA / self.Study_area
+
+             # self.SA_DATABASE = self.TMP_STUDYAREA / "database.gpkg"
+             # self.SA_CAPA = self.TMP_STUDYAREA / "CaPa.gpkg"
+             # self.SA_PICC = self.TMP_STUDYAREA / "PICC.gpkg"
+             self.SA_FINAL = self.TMP_STUDYAREA / "database_final.gpkg"
+             self.SA_FINAL_V = self.TMP_STUDYAREA / "database_final_V.gpkg"
+             self.SA_MASKED_RIVER = self.TMP_IGNCE / "CE_IGN_TOP10V.tiff"
+
+             self.SA_VULN = self.TMP_VULN_DIR / "Vulnerability.tiff"
+             self.SA_CODE = self.TMP_VULN_DIR / "Vulnerability_Code.tiff"
+
+         else:
+             self.Study_area = None
+             self._scenario = None
+
+             self.TMP_STUDYAREA = None
+             self.TMP_DATABASE = None
+             self.TMP_CLIPGDB = None
+             self.TMP_CADASTER = None
+             self.TMP_PICC = None
+             self.TMP_IGNCE = None
+             self.TMP_WMODIF = None
+             self.TMP_CODEVULNE = None
+             self.TMP_VULN_DIR = None
+             self.TMP_RASTERS = None
+             self.TMP_RASTERS_CODE = None
+             self.TMP_RASTERS_VULNE = None
+
+             self.OUT_STUDY_AREA = None
+
+             self.SA = None
+             self.SA_DATABASE = None
+             self.SA_CAPA = None
+             self.SA_PICC = None
+             self.SA_FINAL = None
+             self.SA_FINAL_V = None
+             self.SA_MASKED_RIVER = None
+
+             self.SA_VULN = None
+             self.SA_CODE = None
+
+         self.create_paths_scenario()
+
+         self.check_inputs()
+         self.check_temporary()
+         self.check_outputs()
+
+     def create_paths_scenario(self):
+
+         if self._scenario is not None:
+
+             self.scenario:str = str(self._scenario)
+
+             self.IN_SCEN_DIR = self.IN_WATER_DEPTH / self.SA.stem / self.scenario
+             self.IN_RM_BUILD_DIR = self.IN_SCEN_DIR / "REMOVED_BUILDINGS"
+
+             self.TMP_SCEN_DIR = self.TMP_VULN_DIR / self.scenario
+             self.TMP_RM_BUILD_DIR = self.TMP_SCEN_DIR / "REMOVED_BUILDINGS"
+             self.TMP_QFILES = self.TMP_SCEN_DIR / "Q_FILES"
+
+             self.TMP_VULN = self.TMP_SCEN_DIR / "Vulnerability.tiff"
+             self.TMP_CODE = self.TMP_SCEN_DIR / "Vulnerability_Code.tiff"
+
+             self.OUT_SCEN_DIR = self.OUT_STUDY_AREA / self.scenario
+             self.OUT_VULN = self.OUT_SCEN_DIR / "Vulnerability.tiff"
+             self.OUT_CODE = self.OUT_SCEN_DIR / "Vulnerability_Code.tiff"
+             self.OUT_MASKED_RIVER = self.OUT_SCEN_DIR / "Masked_River_extent.tiff"
+             self.OUT_ACCEPT = self.OUT_SCEN_DIR / "Acceptability.tiff"
+             self.OUT_ACCEPT_100M = self.OUT_SCEN_DIR / "Acceptability_100m.tiff"
+
+         else:
+             self.scenario = None
+
+             self.IN_SCEN_DIR = None
+             self.IN_RM_BUILD_DIR = None
+
+             self.TMP_SCEN_DIR = None
+             self.TMP_RM_BUILD_DIR = None
+             self.TMP_QFILES = None
+
+             self.TMP_VULN = None
+             self.TMP_CODE = None
+
+             self.OUT_SCEN_DIR = None
+             self.OUT_VULN = None
+             self.OUT_CODE = None
+             self.OUT_MASKED_RIVER = None
+             self.OUT_ACCEPT = None
+             self.OUT_ACCEPT_100M = None
+
+     @property
+     def is_valid_inputs(self) -> bool:
+         return self.check_inputs()
+
+     @property
+     def is_valid_study_area(self) -> bool:
+         return self.SA.exists()
+
+     @property
+     def is_valid_vulnerability_csv(self) -> bool:
+         return self.VULNERABILITY_CSV.exists()
+
+     @property
+     def is_valid_points_csv(self) -> bool:
+         return self.POINTS_CSV.exists()
+
+     @property
+     def is_valid_ponderation_csv(self) -> bool:
+         # NOTE: PONDERATION_CSV is currently commented out in __init__;
+         # guard the lookup so the property degrades gracefully.
+         return hasattr(self, "PONDERATION_CSV") and self.PONDERATION_CSV.exists()
+
+     def check_files(self) -> str:
+         """ Check the files in the directories and list the missing ones """
+
+         files = ""
+         for a in self._ALLS:
+             if not a.exists():
+                 files += str(a) + "\n"
+
+         return files
+
+     def change_studyarea(self, Study_area:str = None) -> None:
+
+         if Study_area is None:
+             self._study_area = None
+             self._scenario = None
+         else:
+             if Study_area in self.get_list_studyareas(with_suffix=True):
+                 self._study_area = Path(Study_area)
+             else:
+                 logging.error("The study area does not exist in the study area directory")
+
+         self.create_paths()
+
+     def change_scenario(self, scenario:str) -> None:
+
+         if scenario in self.get_list_scenarios():
+             self._scenario = scenario
+             self.create_paths_scenario()
+             self.check_temporary()
+             self.check_outputs()
+         else:
+             logging.error("The scenario does not exist in the water depth directory")
+
+     def get_files_in_rm_buildings(self) -> list[Path]:
+         return [Path(a) for a in glob.glob(str(self.IN_RM_BUILD_DIR / ("*" + EXTENT)))]
+
+     def get_files_in_rasters_vulne(self) -> list[Path]:
+         return [Path(a) for a in glob.glob(str(self.TMP_RASTERS_VULNE / "*.tiff"))]
+
+     def get_layers_in_gdb(self) -> list[str]:
+         return [a[0] for a in list_layers(str(self.ORIGINAL_GDB))]
+
+     def get_layer_types_in_gdb(self) -> list[str]:
+         return [a[1] for a in list_layers(str(self.ORIGINAL_GDB))]
+
+     def get_layers_in_clipgdb(self) -> list[str]:
+         return [Path(a).stem for a in glob.glob(str(self.TMP_CLIPGDB / ("*" + EXTENT)))]
+
+     def get_layers_in_wmodif(self) -> list[str]:
+         return [Path(a).stem for a in glob.glob(str(self.TMP_WMODIF / ("*" + EXTENT)))]
+
+     def get_layers_in_codevulne(self) -> list[str]:
+         return [Path(a).stem for a in glob.glob(str(self.TMP_CODEVULNE / ("*" + EXTENT)))]
+
+     def get_files_in_rasters_code(self) -> list[Path]:
+         return [Path(a) for a in glob.glob(str(self.TMP_RASTERS_CODE / "*.tiff"))]
+
+     def get_q_files(self) -> list[Path]:
+         return [Path(a) for a in glob.glob(str(self.TMP_QFILES / "*.tif"))]
+
+     def get_list_scenarios(self) -> list[str]:
+         return [Path(a).stem for a in glob.glob(str(self.IN_WATER_DEPTH / self.SA.stem / "Scenario*"))]
+
+     def get_list_studyareas(self, with_suffix:bool = False) -> list[str]:
+
+         if with_suffix:
+             return [Path(a).name for a in glob.glob(str(self.IN_STUDY_AREA / "*.shp"))]
+         else:
+             return [Path(a).stem for a in glob.glob(str(self.IN_STUDY_AREA / "*.shp"))]
+
+     def get_sims_files_for_scenario(self) -> list[Path]:
+
+         return [Path(a) for a in glob.glob(str(self.IN_SCEN_DIR / "*.tif"))]
+
+     def get_sim_file_for_return_period(self, return_period:int) -> Path:
+
+         sims = self.get_sims_files_for_scenario()
+
+         if len(sims) == 0:
+             logging.error("No simulations found")
+             return None
+
+         if "_h.tif" in sims[0].name:
+             for cursim in sims:
+                 if cursim.stem.find("_T{}_".format(return_period)) != -1:
+                     return cursim
+         else:
+             for cursim in sims:
+                 if cursim.stem.find("T{}".format(return_period)) != -1:
+                     return cursim
+
+         return None
+
+     def get_types_in_file(self, file:str) -> list[str]:
+         """ Get the types of the geometries in the file """
+
+         return [a[1] for a in list_layers(str(file))]
+
+     def is_type_unique(self, file:str) -> bool:
+         """ Check if the file contains only one type of geometry """
+
+         types = self.get_types_in_file(file)
+         return len(types) == 1
+
+     def is_polygons(self, set2test:set) -> bool:
+         """ Check if the set contains only polygon types """
+
+         set2test = list(set2test)
+         firstone = set2test[0]
+         if 'Polygon' in firstone:
+             for curtype in set2test:
+                 if 'Polygon' not in curtype:
+                     return False
+             return True
+         else:
+             return False
+
+     def is_same_types(self, file:str) -> tuple[bool, str]:
+         """ Check if the file contains only the same type of geometry """
+
+         types = self.get_types_in_file(file)
+
+         if len(types) == 1:
+             if 'Point' in types[0]:
+                 return True, 'Point'
+             elif 'Polygon' in types[0]:
+                 return True, 'Polygon'
+             elif 'LineString' in types[0]:
+                 return True, 'LineString'
+             else:
+                 raise ValueError(f"The type of geometry {types[0]} is not recognized")
+         else:
+             firstone = types[0]
+             if 'Point' in firstone:
+                 for curtype in types:
+                     if 'Point' not in curtype:
+                         return False, None
+                 return True, 'Point'
+
+             elif 'Polygon' in firstone:
+                 for curtype in types:
+                     if 'Polygon' not in curtype:
+                         return False, None
+
+                 return True, 'Polygon'
+
+             elif 'LineString' in firstone:
+                 for curtype in types:
+                     if 'LineString' not in curtype:
+                         return False, None
+
+                 return True, 'LineString'
+             else:
+                 raise ValueError(f"The type of geometry {firstone} is not recognized")
+
+
+     def get_return_periods(self) -> list[int]:
+         """
+         Get the return periods from the simulations
+
+         :return list[int]: the **sorted list** of return periods
+         """
+
+         # List files in directory
+         sims = self.get_sims_files_for_scenario()
+
+         if len(sims) == 0:
+             logging.error("No simulations found")
+             return None
+
+         # Two cases:
+         #  - Return periods are named as T2.tif, T5.tif, T10.tif, ...
+         #  - Return periods are named as *_T2_h.tif, *_T5_h.tif, *_T10_h.tif, ...
+         if "_h.tif" in sims[0].name:
+
+             # Searching for the position of the return period in the name
+             idx_T = [cursim.name.find("_T") for cursim in sims]
+             idx_h = [cursim.name.find("_h.tif") for cursim in sims]
+
+             assert len(idx_T) == len(idx_h), "The number of T and h are not the same"
+             for curT, curh in zip(idx_T, idx_h):
+                 assert curT != -1, "The T is not found"
+                 assert curh != -1, "The h is not found"
+                 assert curh > curT, "The h is before the T"
+
+             # Create the list of return periods -- only the numeric part
+             sims = [int(cursim.name[idx_T[i]+2:idx_h[i]]) for i, cursim in enumerate(sims)]
+         else:
+             # Searching for the position of the return period in the name
+             idx_T = [cursim.name.find("T") for cursim in sims]
+             idx_h = [cursim.name.find(".tif") for cursim in sims]
+
+             assert len(idx_T) == len(idx_h), "The number of T and h are not the same"
+             for curT, curh in zip(idx_T, idx_h):
+                 assert curT != -1, "The T is not found"
+                 assert curh != -1, "The h is not found"
+                 assert curh > curT, "The h is before the T"
+
+             # Create the list of return periods -- only the numeric part
+             sims = [int(cursim.name[idx_T[i]+1:idx_h[i]]) for i, cursim in enumerate(sims)]
+
+         return sorted(sims)
+
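+     # Illustrative sketch (not from the released file): the two file-naming
+     # conventions handled by get_return_periods(), reduced to a standalone helper.
+     @staticmethod
+     def _example_parse_return_period(filename:str) -> int:
+         # "Sim_T25_h.tif" -> 25 (convention with "_T" and "_h.tif")
+         if "_h.tif" in filename:
+             return int(filename[filename.find("_T")+2:filename.find("_h.tif")])
+         # "T25.tif" -> 25 (convention with a leading "T")
+         return int(filename[filename.find("T")+1:filename.find(".tif")])
+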
+     def get_ponderations(self) -> pd.DataFrame:
+         """ Get the ponderation data from the available simulations """
+
+         rt = self.get_return_periods()
+
+         if rt is None or len(rt) == 0:
+             logging.error("No simulations found")
+             return None
+
+         pond = []
+
+         pond.append(1./float(rt[0]) + (1./float(rt[0]) - 1./float(rt[1]))/2.)
+         for i in range(1, len(rt)-1):
+             # Full formula
+             # pond.append((1./float(rt[i-1]) - 1./float(rt[i]))/2. + (1./float(rt[i]) - 1./float(rt[i+1]))/2.)
+
+             # More compact formula
+             pond.append((1./float(rt[i-1]) - 1./float(rt[i+1]))/2.)
+
+         pond.append(1./float(rt[-1]) + (1./float(rt[-2]) - 1./float(rt[-1]))/2.)
+
+         return pd.DataFrame(pond, columns=["Ponderation"], index=rt)
+
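+     # Worked example (editor's illustration, not from the released file):
+     # for return periods [2, 5, 10], get_ponderations() yields
+     #   T2 : 1/2  + (1/2 - 1/5 )/2 = 0.65
+     #   T5 : (1/2 - 1/10)/2        = 0.20
+     #   T10: 1/10 + (1/5 - 1/10)/2 = 0.15
+     # i.e. each weight approximates the annual-probability interval centred
+     # on the corresponding return period.
+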
+     def get_filepath_for_return_period(self, return_period:int) -> Path:
+
+         return self.get_sim_file_for_return_period(return_period)
+
+     def change_dir(self) -> None:
+         os.chdir(self.main_dir)
+         logging.info("Current directory: %s", os.getcwd())
+
+     def restore_dir(self) -> None:
+         os.chdir(self.old_dir)
+         logging.info("Current directory: %s", os.getcwd())
+
+     def check_inputs(self) -> bool:
+         """
+         Check if the input directories exist.
+
+         Inputs cannot be created automatically. The user must provide them.
+
+         """
+
+         err = False
+         if not self.IN_DATABASE.exists():
+             logging.error("INPUT : The database directory does not exist")
+             err = True
+
+         if not self.IN_STUDY_AREA.exists():
+             logging.error("INPUT : The study area directory does not exist")
+             err = True
+
+         if not self.IN_CSV.exists():
+             logging.error("INPUT : The CSV directory does not exist")
+             err = True
+
+         if not self.IN_WATER_DEPTH.exists():
+             logging.error("INPUT : The water depth directory does not exist")
+             err = True
+
+         if not self.IN_EPU_STATIONS.exists():
+             logging.error("INPUT : The EPU stations directory does not exist")
+             err = True
+
+         if self.Study_area is not None:
+             if not self.SA.exists():
+                 logging.error("INPUT : The study area file does not exist")
+                 err = True
+
+         if not self.ORIGINAL_GDB.exists():
+             logging.error("INPUT : The original gdb file does not exist - Please pull it from the SPW-ARNE")
+             err = True
+
+         if not self.CAPA_WALLOON.exists():
+             logging.error("INPUT : The Cadastre Walloon file does not exist - Please pull it from the SPW")
+             err = True
+
+         if not self.PICC_WALLOON.exists():
+             logging.error("INPUT : The PICC Walloon file does not exist - Please pull it from the SPW website")
+             err = True
+
+         if not self.CE_IGN_TOP10V.exists():
+             logging.error("INPUT : The CE IGN top10v file does not exist - Please pull it from the IGN")
+             err = True
+
+         if self.scenario is None:
+             logging.debug("The scenario has not been defined")
+         else:
+             if not self.IN_SCEN_DIR.exists():
+                 logging.error("The scenario directory does not exist")
+                 err = True
+
+         return not err
+
+     def check_temporary(self) -> bool:
+         """
+         Check if the temporary directories exist.
+
+         If not, create them.
+         """
+
+         self.TMP_DIR.mkdir(parents=True, exist_ok=True)
+
+         if self.Study_area is not None:
+             self.TMP_STUDYAREA.mkdir(parents=True, exist_ok=True)
+             self.TMP_DATABASE.mkdir(parents=True, exist_ok=True)
+             self.TMP_CLIPGDB.mkdir(parents=True, exist_ok=True)
+             self.TMP_CADASTER.mkdir(parents=True, exist_ok=True)
+             self.TMP_WMODIF.mkdir(parents=True, exist_ok=True)
+             self.TMP_CODEVULNE.mkdir(parents=True, exist_ok=True)
+             self.TMP_PICC.mkdir(parents=True, exist_ok=True)
+             self.TMP_IGNCE.mkdir(parents=True, exist_ok=True)
+             self.TMP_VULN_DIR.mkdir(parents=True, exist_ok=True)
+             self.TMP_RASTERS.mkdir(parents=True, exist_ok=True)
+             self.TMP_RASTERS_CODE.mkdir(parents=True, exist_ok=True)
+             self.TMP_RASTERS_VULNE.mkdir(parents=True, exist_ok=True)
+
+         if self.scenario is not None:
+             self.TMP_SCEN_DIR.mkdir(parents=True, exist_ok=True)
+             self.TMP_RM_BUILD_DIR.mkdir(parents=True, exist_ok=True)
+             self.TMP_QFILES.mkdir(parents=True, exist_ok=True)
+
+         return True
+
+     def check_outputs(self) -> bool:
+         """
+         Check if the output directories exist.
+
+         If not, create them.
+         """
+
+         self.OUT_DIR.mkdir(parents=True, exist_ok=True)
+
+         if self.Study_area is not None:
+             self.OUT_STUDY_AREA.mkdir(parents=True, exist_ok=True)
+
+         if self.scenario is not None:
+             self.OUT_SCEN_DIR.mkdir(parents=True, exist_ok=True)
+
+         return True
+
+     def check_database_creation(self) -> bool:
+         """
+         Check if the database files have been created.
+
+         NOTE: relies on SA_DATABASE, SA_CAPA and SA_PICC, whose assignments
+         are currently commented out in create_paths().
+         """
+
+         if not self.SA_DATABASE.exists():
+             logging.error("The database file does not exist")
+             return False
+
+         if not self.SA_CAPA.exists():
+             logging.error("The Cadastre Walloon file does not exist")
+             return False
+
+         if not self.SA_PICC.exists():
+             logging.error("The PICC Walloon file does not exist")
+             return False
+
+         if not self.SA_FINAL.exists():
+             logging.error("The final database file does not exist")
+             return False
+
+         if not self.SA_FINAL_V.exists():
+             logging.error("The final database with vulnerability levels does not exist")
+             return False
+
+         return True
+
+     def check_before_database_creation(self) -> bool:
+         """ Check if the necessary files are present before the database creation """
+
+         if not self.is_valid_inputs:
+             logging.error("There are missing input directories - Please check carefully the input directories and the logs")
+             return False
+
+         if not self.is_valid_study_area:
+             logging.error("The study area file does not exist - Please create it")
+             return False
+
+         if not self.is_valid_vulnerability_csv:
+             logging.error("The vulnerability CSV file does not exist - Please create it")
+             return False
+
+         return True
+
+     def check_before_rasterize(self) -> bool:
+
+         if not self.TMP_CODEVULNE.exists():
+             logging.error("The final database with vulnerability levels does not exist")
+             return False
+
+         if not self.TMP_WMODIF.exists():
+             logging.error("The vector data with modifications does not exist")
+             return False
+
+         return True
+
+     def check_before_vulnerability(self) -> bool:
+
+         if not self.SA.exists():
+             logging.error("The area of interest does not exist")
+             return False
+
+         if not self.IN_WATER_DEPTH.exists():
+             logging.error("The water depth directory does not exist")
+             return False
+
+         if not self.IN_SCEN_DIR.exists():
+             logging.error("The scenario directory does not exist in the water depth directory")
+             return False
+
+         if not self.SA_MASKED_RIVER.exists():
+             logging.error("The IGN raster does not exist")
+             return False
+
+         return True
+
+     def check_vuln_code_sa(self) -> bool:
+
+         if not self.SA_VULN.exists():
+             logging.error("The vulnerability raster file does not exist")
+             return False
+
+         if not self.SA_CODE.exists():
+             logging.error("The vulnerability code raster file does not exist")
+             return False
+
+         return True
+
+     def check_vuln_code_scenario(self) -> bool:
+
+         if not self.TMP_VULN.exists():
+             logging.error("The vulnerability raster file does not exist")
+             return False
+
+         if not self.TMP_CODE.exists():
+             logging.error("The vulnerability code raster file does not exist")
+             return False
+
+         return True
+
+     def compare_original_clipped_layers(self) -> str:
+         """ Compare the original layers with the clipped ones """
+
+         layers = self.get_layers_in_gdb()
+         layers_clip = self.get_layers_in_clipgdb()
+
+         ret = 'These layers have not been clipped:\n'
+         for layer in layers:
+             if layer not in layers_clip:
+                 ret += " - {}\n".format(layer)
+
+         ret += '\nThese layers have been clipped but are not present in the GDB:\n'
+         for layer in layers_clip:
+             if layer not in layers:
+                 ret += " - {}\n".format(layer)
+
+         ret += '\n'
+
+         return ret
+
+     def compare_clipped_raster_layers(self) -> str:
+         """ Compare the clipped layers with the rasterized ones """
+
+         layers = self.get_layers_in_clipgdb()
+         layers_rast = self.get_layers_in_codevulne()
+
+         ret = 'These layers have not been rasterized:\n'
+         for layer in layers:
+             if layer not in layers_rast:
+                 ret += " - {}\n".format(layer)
+
+         ret += '\nThese layers have been rasterized but are not in the original GDB:\n'
+         for layer in layers_rast:
+             if layer not in layers:
+                 ret += " - {}\n".format(layer)
+
+         ret += '\n'
+
+         return ret
+
+     def get_operand(self, file:str) -> Modif_Type:
+         """ Get the operand based on the layer name """
+         LAYERS_WALOUS = ["WALOUS_2018_LB72_112",
+                          "WALOUS_2018_LB72_31",
+                          "WALOUS_2018_LB72_32",
+                          "WALOUS_2018_LB72_331",
+                          "WALOUS_2018_LB72_332",
+                          "WALOUS_2018_LB72_333",
+                          "WALOUS_2018_LB72_34"]
+
+         ret, curtype = self.is_same_types(file)
+         layer = Path(file).stem
+
+         if not ret:
+             raise ValueError("The layer contains different types of geometries")
+
+         if layer in LAYERS_WALOUS:
+             return Modif_Type.WALOUS
+
+         elif curtype == "Point":
+
+             self.points2polys.append(layer)
+
+             if layer == "BDREF_DGO3_PASH__SCHEMA_STATIONS_EPU":
+                 return Modif_Type.POINT2POLY_EPURATION
+             elif layer == "INFRASIG_SOINS_SANTE__ETAB_AINES":
+                 return Modif_Type.POINT2POLY_PICC
+             else:
+                 return Modif_Type.POINT2POLY_CAPAPICC
+
+         elif layer == "Hab_2018_CABU":
+             return Modif_Type.INHABITED
+
+         elif layer == "INFRASIG_ROUTE_RES_ROUTIER_TE_AXES":
+
+             self.lines2polys.append(layer)
+
+             return Modif_Type.ROAD
+
+         else:
+             return Modif_Type.COPY
+
+
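+ # Illustrative sketch (not from the released file): how get_operand() drives the
+ # per-layer processing choice; the two layer names are taken from the dispatch above.
+ def _example_operand_dispatch(manager:Accept_Manager):
+     for layer_file in ["Hab_2018_CABU" + EXTENT, "INFRASIG_ROUTE_RES_ROUTIER_TE_AXES" + EXTENT]:
+         op = manager.get_operand(manager.TMP_CLIPGDB / layer_file)
+         logging.info("%s -> %s", layer_file, op.name)
+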
+ def clip_layer(layer:str,
+                file_path:str,
+                Study_Area:str,
+                output_dir:str):
+     """
+     Clip the input data based on the selected basin and save it
+     in a separate vector file.
+
+     As the shapefile format does not support DateTime, the columns with
+     DateTime are converted to string.
+
+     :param layer: the layer name in the GDB file
+     :param file_path: the path to the GDB file
+     :param Study_Area: the path to the study area shapefile
+     :param output_dir: the path to the output directory
+     """
+
+     layer = str(layer)
+     file_path = str(file_path)
+     Study_Area = str(Study_Area)
+     output_dir = Path(output_dir)
+
+     St_Area = gpd.read_file(Study_Area, engine=ENGINE)
+
+     logging.info(layer)
+
+     # The data is clipped during the reading
+     # **It is more efficient than reading the entire data and then clipping it**
+     #
+     # FIXME: "read_dataframe" is used directly rather than "gpd.read_file" because
+     # the "layer" parameter is correctly passed to the "read_dataframe" function...
+     df:gpd.GeoDataFrame = read_dataframe(file_path, layer=layer, mask=St_Area['geometry'][0])
+
+     if len(df) == 0:
+         logging.warning("No data found for layer " + str(layer))
+         return "No data found for layer " + str(layer)
+
+     # Force Lambert72 -> EPSG:31370
+     df.to_crs("EPSG:31370", inplace=True)
+     try:
+         date_columns = df.select_dtypes(include=['datetimetz']).columns.tolist()
+         if len(date_columns) > 0:
+             df[date_columns] = df[date_columns].astype(str)
+
+         df.to_file(str(output_dir / (layer + EXTENT)), mode='w', engine=ENGINE)
+     except Exception as e:
+         logging.error("Error while saving the clipped " + str(layer) + " to file")
+         logging.error(e)
+
+     logging.info("Saved the clipped " + str(layer) + " to file")
+     return "Saved the clipped " + str(layer) + " to file"
+
+
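+ # Illustrative sketch (not from the released file): driving clip_layer() over
+ # every layer of the original GDB, clipped to the current study area.
+ def _example_clip_all_layers(manager:Accept_Manager):
+     for layer in manager.get_layers_in_gdb():
+         clip_layer(layer, manager.ORIGINAL_GDB, manager.SA, manager.TMP_CLIPGDB)
+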
+ def data_modification(layer:str,
+                       manager:Accept_Manager,
+                       picc:gpd.GeoDataFrame,
+                       capa:gpd.GeoDataFrame):
+     """
+     Apply the data modifications as described in the LEMA report
+
+     FIXME : Add more doc in this docstring
+
+     :param layer: the layer name in the database
+     :param manager: the Accept_Manager instance, holding the input/output directories
+     :param picc: the PICC Walloon file -- Preloaded
+     :param capa: the Cadastre Walloon file -- Preloaded
+     """
+
+     df1:gpd.GeoDataFrame
+     df2:gpd.GeoDataFrame
+
+     layer = str(layer)
+
+     dir_input = manager.TMP_CLIPGDB
+     dir_output = manager.TMP_WMODIF
+
+     input_file = str(dir_input / (layer + EXTENT))
+     output_file = str(dir_output / (layer + EXTENT))
+
+     # Read the data
+     df:gpd.GeoDataFrame = gpd.read_file(input_file, engine=ENGINE)
+     nblines, _ = df.shape
+
+     if nblines > 0:
+         op = manager.get_operand(input_file)
+
+         if op == Modif_Type.WALOUS:
+             # Walous layers changed to PICC buildings
+
+             assert picc.crs == df.crs, "CRS of PICC and input data do not match"
+
+             assert "GEOREF_ID" in picc.columns, "The PICC file does not contain the GEOREF_ID column"
+             assert "NATUR_CODE" in picc.columns, "The PICC file does not contain the NATUR_CODE column"
+
+             df1 = gpd.sjoin(picc, df, how="inner", predicate="intersects")
+             cols = df.columns
+
+             cols = np.append(cols, "GEOREF_ID")
+             cols = np.append(cols, "NATUR_CODE")
+
+             df1 = df1[cols]
+
+             if df1.shape[0] > 0:
+                 assert manager.is_polygons(set(df1.geom_type)), f"The layer does not contain polygons - {op}"
+                 df1.to_file(output_file, engine=ENGINE)
+             else:
+                 logging.warning("No data found for layer " + str(layer))
+
+         elif op == Modif_Type.POINT2POLY_EPURATION:
+             # Change BDREF based on AJOUT_PDET sent by Perrine (SPI)
+
+             # The original layer is a point layer.
+             # The EPU_STATIONS shape file (from SPI) is a polygon layer.
+
+             df1 = gpd.read_file(str(manager.EPU_STATIONS), engine=ENGINE)
+
+             assert df1.crs == df.crs, "CRS of AJOUT_PDET and input data do not match"
+
+             df2 = gpd.sjoin(picc, df1, how="inner", predicate="intersects")
+
+             if df2.shape[0] > 0:
+                 assert manager.is_polygons(set(df2.geom_type)), f"The layer does not contain polygons - {op}"
+                 df2.to_file(output_file, engine=ENGINE)
+             else:
+                 logging.warning("No data found for layer " + str(layer))
+
+         elif op == Modif_Type.POINT2POLY_PICC:
+             # Select the polygons that contain the points
+             # in the Cadastre and PICC files
+
+             assert capa.crs == df.crs, "CRS of CaPa and input data do not match"
+             assert "CaPaKey" in capa.columns, "The CaPa file does not contain the CaPaKey column"
+
+             df1 = gpd.sjoin(capa, df, how="inner", predicate="intersects")
+             cols = df.columns
+
+             cols = np.append(cols, "CaPaKey")
+             df1 = df1[cols]
+             df2 = gpd.sjoin(picc, df1, how="inner", predicate="intersects")
+
+             if df2.shape[0] > 0:
+                 assert manager.is_polygons(set(df2.geom_type)), f"The layer does not contain polygons - {op}"
+                 df2.to_file(output_file, engine=ENGINE)
+             else:
+                 logging.warning("No data found for layer " + str(layer))
+
+         elif op == Modif_Type.POINT2POLY_CAPAPICC:
+
+             # Select the polygons that contain the points
+             # in the Cadastre and PICC files
+
+             assert capa.crs == df.crs, "CRS of CaPa and input data do not match"
+             assert picc.crs == df.crs, "CRS of PICC and input data do not match"
+
+             # Join the Layer and CaPa DataFrames : https://geopandas.org/en/stable/docs/reference/api/geopandas.sjoin.html
+             # "inner" : use intersection of keys from both dfs; retain only left_df geometry column
+             # "intersects" : Binary predicate. Valid values are determined by the spatial index used.
+             df1 = gpd.sjoin(capa, df, how="inner", predicate="intersects")
+
+             # Retain only the columns of the input data
+             cols = df.columns
+             # but add the CaPaKey
+             cols = np.append(cols, "CaPaKey")
+
+             df1 = df1[cols]
+
+             # Join the df1 and PICC DataFrames : https://geopandas.org/en/stable/docs/reference/api/geopandas.sjoin.html
+             df2 = gpd.sjoin(picc, df1, how="inner", predicate="intersects")
+
+             # Add only the GEOREF_ID and NATUR_CODE columns from PICC
+             cols = np.append(cols, "GEOREF_ID")
+             cols = np.append(cols, "NATUR_CODE")
+
+             df2 = df2[cols]
+
+             if df2.shape[0] > 0:
+                 assert manager.is_polygons(set(df2.geom_type)), f"The layer does not contain polygons - {op}"
+                 df2.to_file(output_file, engine=ENGINE)
+             else:
+                 logging.warning("No data found for layer " + str(layer))
+
+         elif op == Modif_Type.INHABITED:
+             # Select only the buildings with a number of inhabitants > 0
+             df1 = df[df["NbsHabTOT"] > 0]
+
+             if df1.shape[0] > 0:
+                 assert manager.is_polygons(set(df1.geom_type)), f"The layer does not contain polygons - {op}"
+                 df1.to_file(output_file, engine=ENGINE)
+             else:
+                 logging.warning("No data found for layer " + str(layer))
+
+         elif op == Modif_Type.ROAD:
+             # Create a buffer around the roads
+             df1 = df.buffer(distance=6, cap_style=2)
+
+             if df1.shape[0] > 0:
+                 assert set(df1.geom_type) == {'Polygon'}, f"The layer does not contain polygons - {op}"
+                 df1.to_file(output_file, engine=ENGINE)
+             else:
+                 logging.warning("No data found for layer " + str(layer))
+
+         elif op == Modif_Type.COPY:
+             # Just copy the data if it contains polygons
+             if manager.is_polygons(set(df.geom_type)):
+                 df.to_file(output_file, engine=ENGINE)
+             else:
+                 logging.error("The layer does not contain polygons - " + str(layer))
+         else:
+             raise ValueError(f"The operand {op} is not recognized")
+
+         return "Data modification done for " + str(layer)
+     else:
+         # Normally, phase 1 does not create empty files
+         # But it is better to check... ;-)
+         logging.error("Skipped " + str(layer) + " due to no polygon in the study area")
+         return "Skipped " + str(layer) + " due to no polygon in the study area"
+
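+ # Minimal sketch (not from the released file) of the point-to-polygon pattern used
+ # above: keep the CaPa parcels intersecting the point layer, then the PICC
+ # buildings intersecting those parcels. Assumes the frames share no column names
+ # apart from the geometry.
+ def _example_point2poly(points:gpd.GeoDataFrame, capa:gpd.GeoDataFrame, picc:gpd.GeoDataFrame) -> gpd.GeoDataFrame:
+     parcels = gpd.sjoin(capa, points, how="inner", predicate="intersects")
+     return gpd.sjoin(picc, parcels[capa.columns], how="inner", predicate="intersects")
+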
+ def vector_to_raster(layer:str,
+                      manager:Accept_Manager,
+                      attribute:str,
+                      pixel_size:float):
+     """
+     Convert a vector layer to a raster tiff file
+
+     :param layer: the layer name in the GDB file
+     :param manager: the Accept_Manager instance, providing the vector file and the extent
+     :param attribute: the attribute to rasterize
+     :param pixel_size: the pixel size of the raster
+
+     """
+
+     layer = str(layer)
+
+     vector_input = str(manager.TMP_CODEVULNE / (layer + EXTENT))
+     extent = str(manager.SA)
+     attribute = str(attribute)
+     pixel_size = float(pixel_size)
+
+     out_file = manager.TMP_RASTERS / attribute / (layer + ".tiff")
+
+     if out_file.exists():
+         os.remove(out_file)
+
+     out_file = str(out_file)
+
+     NoData_value = 0
+
+     extent_ds:ogr.DataSource = ogr.Open(extent)
+     extent_layer = extent_ds.GetLayer()
+
+     x_min, x_max, y_min, y_max = extent_layer.GetExtent()
+
+     x_min = float(int(x_min))
+     x_max = float(np.ceil(x_max))
+     y_min = float(int(y_min))
+     y_max = float(np.ceil(y_max))
+
+     # Open the data source and read the extent
+     source_ds:ogr.DataSource = ogr.Open(vector_input)
+     if source_ds is None:
+         logging.error(f"Could not open the data source {layer}")
+         return
+     source_layer = source_ds.GetLayer()
+
+     # Create the destination data source
+     x_res = int((x_max - x_min) / pixel_size)
+     y_res = int((y_max - y_min) / pixel_size)
+     target_ds:gdal.Dataset = gdal.GetDriverByName('GTiff').Create(out_file,
+                                                                   x_res, y_res, 1,
+                                                                   gdal.GDT_Byte,
+                                                                   options=["COMPRESS=LZW"])
+
+     target_ds.SetGeoTransform((x_min, pixel_size, 0, y_max, 0, -pixel_size))
+     srs = osr.SpatialReference()
+     srs.ImportFromEPSG(31370)
+     target_ds.SetProjection(srs.ExportToWkt())
+
+     band = target_ds.GetRasterBand(1)
+     band.SetNoDataValue(NoData_value)
+
+     # Rasterize the areas
+     gdal.RasterizeLayer(target_ds, [1],
+                         source_layer,
+                         options=["ATTRIBUTE=" + attribute,
+                                  "ALL_TOUCHED=TRUE"])
+     target_ds = None
+
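+ # Illustrative sketch (not from the released file): rasterizing every prepared
+ # layer once per attribute, as vector_to_raster() expects. The 1 m default
+ # pixel size is an assumption for the example.
+ def _example_rasterize_all(manager:Accept_Manager, pixel_size:float = 1.):
+     for layer in manager.get_layers_in_codevulne():
+         for attribute in ("Vulne", "Code"):
+             vector_to_raster(layer, manager, attribute, pixel_size)
+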
+ def compute_vulnerability(manager:Accept_Manager):
+     """
+     Compute the vulnerability for the Study Area
+
+     This function **will not modify** the data by the removed buildings/scenarios.
+
+     :param manager: the Accept_Manager instance from the calling function
+     """
+
+     vuln_csv = Vulnerability_csv(manager.VULNERABILITY_CSV)
+
+     rasters_vuln = manager.get_files_in_rasters_vulne()
+
+     logging.info("Number of files: %s", len(rasters_vuln))
+
+     ds:gdal.Dataset = gdal.Open(str(rasters_vuln[0]))
+
+     tmp_vuln = np.array(ds.GetRasterBand(1).ReadAsArray())
+
+     x, y = tmp_vuln.shape
+
+     logging.info("Computing Vulnerability")
+
+     array_vuln = np.ones((x, y), dtype=np.int8)
+     array_code = np.ones((x, y), dtype=np.int8)
+
+     # Create a JIT function to update the arrays
+     # Faster than the classical Python loop or Numpy
+     @nb.jit(nopython=True, boundscheck=False, inline='always')
+     # @cuda.jit(device=True, inline=True)
+     def update_arrays_jit(tmp_vuln, loccode, array_vuln, array_code):
+         for i in range(tmp_vuln.shape[0]):
+             for j in range(tmp_vuln.shape[1]):
+                 if tmp_vuln[i, j] >= array_vuln[i, j]:
+                     array_vuln[i, j] = tmp_vuln[i, j]
+                     array_code[i, j] = loccode
+
+         return array_vuln, array_code
+
+     for i in tqdm(range(len(rasters_vuln)), 'Computing Vulnerability : '):
+         logging.info("Computing layer {} / {}".format(i, len(rasters_vuln)))
+         ds = gdal.Open(str(rasters_vuln[i]))
+
+         tmp_vuln = ds.GetRasterBand(1).ReadAsArray()
+
+         loccode = vuln_csv.get_vulnerability_code(rasters_vuln[i].stem)
+
+         # We use the jit-compiled function (updates the arrays in place)
+         update_arrays_jit(tmp_vuln, loccode, array_vuln, array_code)
+
+     # ij = np.where(array_vuln == 0)
+     # array_vuln[ij] = 1
+     # array_code[ij] = 1
+
+     logging.info("Saving the computed vulnerability")
+     dst_filename = str(manager.SA_VULN)
+     y_pixels, x_pixels = array_vuln.shape  # raster shape is (rows, cols)
+
+     driver = gdal.GetDriverByName('GTiff')
+     dataset = driver.Create(dst_filename, x_pixels, y_pixels, 1, gdal.GDT_Byte, options=["COMPRESS=LZW"])
+     dataset.GetRasterBand(1).WriteArray(array_vuln.astype(np.int8))
+     # The following code adds the GeoTransform and Projection
+     geotrans = ds.GetGeoTransform()  # reuse the GeoTransform of the last opened raster
+     proj = ds.GetProjection()        # idem for the Projection
+     dataset.SetGeoTransform(geotrans)
+     dataset.SetProjection(proj)
+     dataset.FlushCache()
+     dataset = None
+
+     logging.info("Saving the computed codes")
+     dst_filename = str(manager.SA_CODE)
+     y_pixels, x_pixels = array_code.shape  # raster shape is (rows, cols)
+     driver = gdal.GetDriverByName('GTiff')
+     dataset = driver.Create(dst_filename, x_pixels, y_pixels, 1, gdal.GDT_Byte, options=["COMPRESS=LZW"])
+     dataset.GetRasterBand(1).WriteArray(array_code.astype(np.int8))
+     # The following code adds the GeoTransform and Projection
+     geotrans = ds.GetGeoTransform()  # reuse the GeoTransform of the last opened raster
+     proj = ds.GetProjection()        # idem for the Projection
+     dataset.SetGeoTransform(geotrans)
+     dataset.SetProjection(proj)
+     dataset.FlushCache()
+     dataset = None
+
+     logging.info("Computed Vulnerability for the Study Area - Done")
+
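+ # Editor's illustration (not from the released file): a pure-NumPy equivalent of
+ # update_arrays_jit() above, documenting its semantics -- keep the highest
+ # vulnerability seen so far and record the code of the winning layer.
+ def _example_update_arrays_numpy(tmp_vuln:np.ndarray, loccode:int, array_vuln:np.ndarray, array_code:np.ndarray):
+     ij = tmp_vuln >= array_vuln
+     array_vuln[ij] = tmp_vuln[ij]
+     array_code[ij] = loccode
+     return array_vuln, array_code
+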
+ def compute_vulnerability4scenario(manager:Accept_Manager):
+     """ Compute the vulnerability for the scenario
+
+     This function **will modify** the data by the removed buildings/scenarios.
+
+     FIXME: It could be interesting to permit the user to provide tiff files for the removed buildings and other scenarios.
+
+     :param manager: the Accept_Manager instance from the calling function
+     """
+
+     array_vuln = gdal.Open(str(manager.SA_VULN))
+     geotrans = array_vuln.GetGeoTransform()  # reuse the GeoTransform of the study-area raster
+     proj = array_vuln.GetProjection()        # idem for the Projection
+
+     array_vuln = np.array(array_vuln.GetRasterBand(1).ReadAsArray())
+
+     array_code = gdal.Open(str(manager.SA_CODE))
+     array_code = np.array(array_code.GetRasterBand(1).ReadAsArray())
+
+     Rbu = manager.get_files_in_rm_buildings()
+
+     if len(Rbu) > 0:
+         for curfile in Rbu:
+             array_mod = gdal.Open(str(curfile))
+             array_mod = np.array(array_mod.GetRasterBand(1).ReadAsArray())
+
+             ij = np.where(array_mod == 1)
+             array_vuln[ij] = 1
+             array_code[ij] = 1
+
+     dst_filename = str(manager.TMP_VULN)
+     y_pixels, x_pixels = array_vuln.shape  # raster shape is (rows, cols)
+
+     driver = gdal.GetDriverByName('GTiff')
+     dataset = driver.Create(dst_filename, x_pixels, y_pixels, 1, gdal.GDT_Byte, options=["COMPRESS=LZW"])
+     dataset.GetRasterBand(1).WriteArray(array_vuln.astype(np.int8))
+     # The following code adds the GeoTransform and Projection
+     dataset.SetGeoTransform(geotrans)
+     dataset.SetProjection(proj)
+     dataset.FlushCache()
+     dataset = None
+
+
+     dst_filename = str(manager.TMP_CODE)
+     y_pixels, x_pixels = array_code.shape  # raster shape is (rows, cols)
+     driver = gdal.GetDriverByName('GTiff')
+     dataset = driver.Create(dst_filename, x_pixels, y_pixels, 1, gdal.GDT_Byte, options=["COMPRESS=LZW"])
+     dataset.GetRasterBand(1).WriteArray(array_code.astype(np.int8))
+     # The following code adds the GeoTransform and Projection
+     dataset.SetGeoTransform(geotrans)
+     dataset.SetProjection(proj)
+     dataset.FlushCache()
+     dataset = None
+
+     logging.info("Computed Vulnerability for the scenario")
+
+ def match_vulnerability2sim(inRas:Path, outRas:Path, MODREC:Path):
+     """
+     Clip the raster to the MODREC/simulation extent
+
+     :param inRas: the input raster file
+     :param outRas: the output raster file
+     :param MODREC: the MODREC/simulation extent file
+
+     """
+
+     inRas = str(inRas)
+     outRas = str(outRas)
+     MODREC = str(MODREC)
+
+     data = gdal.Open(MODREC, gdalconst.GA_ReadOnly)
+     geoTransform = data.GetGeoTransform()
+     minx = geoTransform[0]
+     maxy = geoTransform[3]
+     maxx = minx + geoTransform[1] * data.RasterXSize
+     miny = maxy + geoTransform[5] * data.RasterYSize
+     ds = gdal.Open(inRas)
+     ds = gdal.Translate(outRas, ds, projWin=[minx, maxy, maxx, miny])
+     ds = None
+
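+ # Editor's note (illustrative): projWin above is [ulx, uly, lrx, lry] in map
+ # coordinates, derived from the MODREC GeoTransform gt as
+ #   minx = gt[0];  maxy = gt[3]
+ #   maxx = gt[0] + gt[1] * RasterXSize;  miny = gt[3] + gt[5] * RasterYSize
+ # (gt[5] is negative for north-up rasters, hence miny results from an addition).
+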
+ def compute_acceptability(manager:Accept_Manager,
+                           model_h:np.ndarray,
+                           vulnerability:np.ndarray,
+                           interval:int,
+                           geo_projection):
+     """
+     Compute the local acceptability based on :
+     - the vulnerability
+     - the water depth
+     - the matrices
+
+     :param manager: the Accept_Manager instance from the calling function
+     :param model_h: the water depth matrix
+     :param vulnerability: the vulnerability matrix
+     :param interval: the return period
+     :param geo_projection: the geotransform and the projection - tuple extracted from another raster file
+
+     """
+
+     logging.info(interval)
+
+     Qfile = pd.read_csv(manager.POINTS_CSV)
+
+     Qfile = Qfile[Qfile["Interval"] == interval]
+     Qfile = Qfile.reset_index()
+
+     x, y = vulnerability.shape
+     accept = np.zeros((x, y))
+
+     ij_1 = np.where(vulnerability == 1)
+     ij_2 = np.where(vulnerability == 2)
+     ij_3 = np.where(vulnerability == 3)
+     ij_4 = np.where(vulnerability == 4)
+     ij_5 = np.where(vulnerability == 5)
+
+     bounds = [(0., 0.02), (0.02, 0.3), (0.3, 1), (1, 2.5), (2.5, 1000)]
+
+     accept_1 = [Qfile["h-0"][4], Qfile["h-0.02"][4], Qfile["h-0.3"][4], Qfile["h-1"][4], Qfile["h-2.5"][4]]
+     accept_2 = [Qfile["h-0"][3], Qfile["h-0.02"][3], Qfile["h-0.3"][3], Qfile["h-1"][3], Qfile["h-2.5"][3]]
+     accept_3 = [Qfile["h-0"][2], Qfile["h-0.02"][2], Qfile["h-0.3"][2], Qfile["h-1"][2], Qfile["h-2.5"][2]]
+     accept_4 = [Qfile["h-0"][1], Qfile["h-0.02"][1], Qfile["h-0.3"][1], Qfile["h-1"][1], Qfile["h-2.5"][1]]
+     accept_5 = [Qfile["h-0"][0], Qfile["h-0.02"][0], Qfile["h-0.3"][0], Qfile["h-1"][0], Qfile["h-2.5"][0]]
+
+     accept[:,:] = -99999
+     for ij, loc_accept in zip([ij_1, ij_2, ij_3, ij_4, ij_5], [accept_1, accept_2, accept_3, accept_4, accept_5]):
+         if len(ij[0]) > 0:
+             for idx, (min_bound, max_bound) in enumerate(bounds):
+                 loc_ij = np.where((model_h[ij] > min_bound) & (model_h[ij] <= max_bound))
+                 accept[ij[0][loc_ij], ij[1][loc_ij]] = loc_accept[idx]
+
+     # Save the raster
+     dst_filename = str(manager.TMP_QFILES / "Q{}.tif".format(interval))
+
+     y_pixels, x_pixels = accept.shape  # raster shape is (rows, cols)
+     driver = gdal.GetDriverByName('GTiff')
+     dataset = driver.Create(dst_filename, x_pixels, y_pixels, 1, gdal.GDT_Float32, options=["COMPRESS=LZW"])
+     dataset.GetRasterBand(1).WriteArray(accept.astype(np.float32))
+
+     geotrans, proj = geo_projection
+     dataset.SetGeoTransform(geotrans)
+     dataset.SetProjection(proj)
+     dataset.FlushCache()
+     dataset = None
+
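+ # Minimal sketch (not from the released file) of the per-cell lookup performed
+ # by compute_acceptability(), assuming Intermediate.csv rows are ordered from
+ # vulnerability 5 (row 0) down to vulnerability 1 (row 4):
+ def _example_acceptability_cell(Qfile:pd.DataFrame, vuln:int, depth:float) -> float:
+     bounds = [(0., 0.02), (0.02, 0.3), (0.3, 1.), (1., 2.5), (2.5, 1000.)]
+     cols = ["h-0", "h-0.02", "h-0.3", "h-1", "h-2.5"]
+     row = 5 - vuln  # vulnerability 1 -> row 4, ..., vulnerability 5 -> row 0
+     for (lo, hi), col in zip(bounds, cols):
+         if lo < depth <= hi:
+             return Qfile[col][row]
+     return -99999.
+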
+ def shp_to_raster(vector_fn:str, raster_fn:str, pixel_size:float = 1.):
+     """
+     Convert a vector layer to a raster tiff file.
+
+     The raster will contain only 2 values : 0 and 1
+
+     - 1 : the inside of the vector layer
+     - 0 : the rest == NoData/NullValue
+
+     :param vector_fn: the path to the vector file
+     :param raster_fn: the path to the raster file
+     :param pixel_size: the pixel size of the raster
+     """
+
+     # Force the input to be a string
+     vector_fn = str(vector_fn)
+     raster_fn = str(raster_fn)
+
+     NoData_value = 0  # np.nan is not necessarily a good idea
+     # Open the data source and read the extent
+     source_ds = ogr.Open(vector_fn)
+     source_layer = source_ds.GetLayer()
+     x_min, x_max, y_min, y_max = source_layer.GetExtent()
+
+     x_min = float(int(x_min))
+     x_max = float(np.ceil(x_max))
+     y_min = float(int(y_min))
+     y_max = float(np.ceil(y_max))
+
+     # Create the destination data source
+     x_res = int((x_max - x_min) / pixel_size)
+     y_res = int((y_max - y_min) / pixel_size)
+
+     target_ds = gdal.GetDriverByName('GTiff').Create(raster_fn, x_res, y_res, 1, gdal.GDT_Byte,
+                                                      options=["COMPRESS=LZW"])
+
+     target_ds.SetGeoTransform((x_min, pixel_size, 0, y_max, 0, -pixel_size))
+     srs = osr.SpatialReference()
+     srs.ImportFromEPSG(31370)
+     target_ds.SetProjection(srs.ExportToWkt())
+     band = target_ds.GetRasterBand(1)
+     band.SetNoDataValue(NoData_value)
+     # Rasterize the areas
+     gdal.RasterizeLayer(target_ds,
+                         bands=[1],
+                         layer=source_layer,
+                         burn_values=[1],
+                         options=["ALL_TOUCHED=TRUE"])
+     target_ds = None
+     vector_fn = raster_fn = None
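+
+ # Illustrative end-to-end sketch (not from the released file): the order in which
+ # the helpers above are meant to be chained for one study area and scenario.
+ # The study area and scenario names are assumptions.
+ def _example_pipeline():
+     manager = Accept_Manager(main_dir='Data', Study_area='Bassin_Vesdre', scenario='Scenario1')
+
+     # 1. Clip the regional database to the study area
+     if manager.check_before_database_creation():
+         for layer in manager.get_layers_in_gdb():
+             clip_layer(layer, manager.ORIGINAL_GDB, manager.SA, manager.TMP_CLIPGDB)
+
+     # 2. Apply the LEMA modifications per layer via data_modification()
+     #    (PICC and CaPa must be preloaded as GeoDataFrames)
+     # 3. Rasterize the vulnerability/code attributes via vector_to_raster()
+
+     # 4. Combine the rasters for the study area, then specialise for the scenario
+     compute_vulnerability(manager)
+     compute_vulnerability4scenario(manager)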